Diffstat (limited to 'spec')
-rw-r--r--spec/channels/application_cable/connection_spec.rb2
-rw-r--r--spec/channels/awareness_channel_spec.rb3
-rw-r--r--spec/commands/sidekiq_cluster/cli_spec.rb4
-rw-r--r--spec/components/diffs/overflow_warning_component_spec.rb2
-rw-r--r--spec/components/diffs/stats_component_spec.rb2
-rw-r--r--spec/components/docs/01_overview.html.erb20
-rw-r--r--spec/components/pajamas/avatar_component_spec.rb135
-rw-r--r--spec/components/pajamas/banner_component_spec.rb2
-rw-r--r--spec/components/pajamas/button_component_spec.rb54
-rw-r--r--spec/components/pajamas/checkbox_component_spec.rb6
-rw-r--r--spec/components/pajamas/checkbox_tag_component_spec.rb59
-rw-r--r--spec/components/pajamas/concerns/checkbox_radio_label_with_help_text_spec.rb77
-rw-r--r--spec/components/previews/pajamas/alert_component_preview.rb19
-rw-r--r--spec/components/previews/pajamas/avatar_component_preview.rb27
-rw-r--r--spec/components/previews/pajamas/banner_component_preview.rb54
-rw-r--r--spec/components/previews/pajamas/button_component_preview.rb56
-rw-r--r--spec/components/previews/pajamas/card_component_preview.rb27
-rw-r--r--spec/components/previews/pajamas/spinner_component_preview.rb22
-rw-r--r--spec/contracts/consumer/endpoints/project/pipelines.js16
-rw-r--r--spec/contracts/consumer/fixtures/project/merge_request/diffs_batch.fixture.js10
-rw-r--r--spec/contracts/consumer/fixtures/project/merge_request/diffs_metadata.fixture.js10
-rw-r--r--spec/contracts/consumer/fixtures/project/merge_request/discussions.fixture.js10
-rw-r--r--spec/contracts/consumer/fixtures/project/pipeline/create_a_new_pipeline.fixture.js39
-rw-r--r--spec/contracts/consumer/fixtures/project/pipeline/delete_pipeline.fixture.js24
-rw-r--r--spec/contracts/consumer/fixtures/project/pipeline/get_list_project_pipelines.fixture.js10
-rw-r--r--spec/contracts/consumer/fixtures/project/pipeline/get_pipeline_header_data.fixture.js9
-rw-r--r--spec/contracts/consumer/fixtures/project/pipeline_schedule/update_pipeline_schedule.fixture.js44
-rw-r--r--spec/contracts/consumer/helpers/common_regex_patterns.js1
-rw-r--r--spec/contracts/consumer/resources/api/pipeline_schedules.js26
-rw-r--r--spec/contracts/consumer/resources/api/project/merge_requests.js (renamed from spec/contracts/consumer/endpoints/project/merge_requests.js)14
-rw-r--r--spec/contracts/consumer/resources/api/project/pipelines.js34
-rw-r--r--spec/contracts/consumer/resources/graphql/pipelines.js21
-rw-r--r--spec/contracts/consumer/specs/project/merge_request/show.spec.js36
-rw-r--r--spec/contracts/consumer/specs/project/pipeline/index.spec.js16
-rw-r--r--spec/contracts/consumer/specs/project/pipeline/new.spec.js41
-rw-r--r--spec/contracts/consumer/specs/project/pipeline/show.spec.js54
-rw-r--r--spec/contracts/consumer/specs/project/pipeline_schedule/edit.spec.js41
-rw-r--r--spec/contracts/contracts/project/merge_request/show/mergerequest#show-merge_request_diffs_metadata_endpoint.json4
-rw-r--r--spec/contracts/contracts/project/pipeline/new/pipelines#new-post_create_a_new_pipeline.json43
-rw-r--r--spec/contracts/contracts/project/pipeline/show/pipelines#show-delete_pipeline.json44
-rw-r--r--spec/contracts/contracts/project/pipeline_schedule/edit/pipelineschedules#edit-put_edit_a_pipeline_schedule.json48
-rw-r--r--spec/contracts/provider/pact_helpers/project/merge_request/show/diffs_batch_helper.rb (renamed from spec/contracts/provider/pact_helpers/project/merge_request/diffs_batch_helper.rb)4
-rw-r--r--spec/contracts/provider/pact_helpers/project/merge_request/show/diffs_metadata_helper.rb (renamed from spec/contracts/provider/pact_helpers/project/merge_request/diffs_metadata_helper.rb)4
-rw-r--r--spec/contracts/provider/pact_helpers/project/merge_request/show/discussions_helper.rb (renamed from spec/contracts/provider/pact_helpers/project/merge_request/discussions_helper.rb)4
-rw-r--r--spec/contracts/provider/pact_helpers/project/pipeline/index/create_a_new_pipeline_helper.rb16
-rw-r--r--spec/contracts/provider/pact_helpers/project/pipeline/index/get_list_project_pipelines_helper.rb (renamed from spec/contracts/provider/pact_helpers/project/pipeline/get_list_project_pipelines_helper.rb)4
-rw-r--r--spec/contracts/provider/pact_helpers/project/pipeline/show/delete_pipeline_helper.rb16
-rw-r--r--spec/contracts/provider/pact_helpers/project/pipeline/show/get_pipeline_header_data_helper.rb (renamed from spec/contracts/provider/pact_helpers/project/pipeline/get_pipeline_header_data_helper.rb)4
-rw-r--r--spec/contracts/provider/pact_helpers/project/pipeline_schedule/update_pipeline_schedule_helper.rb16
-rw-r--r--spec/contracts/provider/states/project/merge_request/diffs_batch_state.rb18
-rw-r--r--spec/contracts/provider/states/project/merge_request/diffs_metadata_state.rb18
-rw-r--r--spec/contracts/provider/states/project/merge_request/discussions_state.rb17
-rw-r--r--spec/contracts/provider/states/project/merge_request/show_state.rb47
-rw-r--r--spec/contracts/provider/states/project/pipeline/index_state.rb (renamed from spec/contracts/provider/states/project/pipeline/pipelines_state.rb)0
-rw-r--r--spec/contracts/provider/states/project/pipeline/new_state.rb24
-rw-r--r--spec/contracts/provider/states/project/pipeline/show_state.rb (renamed from spec/contracts/provider/states/project/pipeline/pipeline_state.rb)1
-rw-r--r--spec/contracts/provider/states/project/pipeline_schedule/edit_state.rb15
-rw-r--r--spec/controllers/admin/dev_ops_report_controller_spec.rb11
-rw-r--r--spec/controllers/admin/identities_controller_spec.rb24
-rw-r--r--spec/controllers/admin/topics_controller_spec.rb40
-rw-r--r--spec/controllers/admin/usage_trends_controller_spec.rb13
-rw-r--r--spec/controllers/admin/users_controller_spec.rb6
-rw-r--r--spec/controllers/groups/uploads_controller_spec.rb104
-rw-r--r--spec/controllers/import/bulk_imports_controller_spec.rb38
-rw-r--r--spec/controllers/profiles/personal_access_tokens_controller_spec.rb32
-rw-r--r--spec/controllers/profiles_controller_spec.rb6
-rw-r--r--spec/controllers/projects/analytics/cycle_analytics/stages_controller_spec.rb48
-rw-r--r--spec/controllers/projects/blob_controller_spec.rb1
-rw-r--r--spec/controllers/projects/ci/secure_files_controller_spec.rb67
-rw-r--r--spec/controllers/projects/clusters_controller_spec.rb12
-rw-r--r--spec/controllers/projects/commit_controller_spec.rb17
-rw-r--r--spec/controllers/projects/compare_controller_spec.rb19
-rw-r--r--spec/controllers/projects/environments_controller_spec.rb2
-rw-r--r--spec/controllers/projects/hooks_controller_spec.rb2
-rw-r--r--spec/controllers/projects/issues_controller_spec.rb32
-rw-r--r--spec/controllers/projects/merge_requests_controller_spec.rb46
-rw-r--r--spec/controllers/projects/notes_controller_spec.rb71
-rw-r--r--spec/controllers/projects/pages_controller_spec.rb48
-rw-r--r--spec/controllers/projects/protected_branches_controller_spec.rb54
-rw-r--r--spec/controllers/projects/tags/releases_controller_spec.rb103
-rw-r--r--spec/controllers/projects/uploads_controller_spec.rb184
-rw-r--r--spec/controllers/projects_controller_spec.rb82
-rw-r--r--spec/controllers/registrations_controller_spec.rb8
-rw-r--r--spec/controllers/search_controller_spec.rb67
-rw-r--r--spec/events/ci/pipeline_created_event_spec.rb27
-rw-r--r--spec/events/pages/page_deleted_event_spec.rb35
-rw-r--r--spec/events/pages/page_deployed_event_spec.rb34
-rw-r--r--spec/events/projects/project_created_event_spec.rb34
-rw-r--r--spec/events/projects/project_deleted_event_spec.rb35
-rw-r--r--spec/events/projects/project_path_changed_event_spec.rb46
-rw-r--r--spec/factories/ci/builds.rb27
-rw-r--r--spec/factories/ci/job_artifacts.rb22
-rw-r--r--spec/factories/ci/runners.rb4
-rw-r--r--spec/factories/ci/secure_files.rb6
-rw-r--r--spec/factories/ci/variables.rb1
-rw-r--r--spec/factories/gitlab/database/async_indexes/postgres_async_index.rb4
-rw-r--r--spec/factories/member_roles.rb8
-rw-r--r--spec/factories/projects.rb10
-rw-r--r--spec/factories/users.rb2
-rw-r--r--spec/factories/users/project_user_callouts.rb10
-rw-r--r--spec/factories/work_items.rb4
-rw-r--r--spec/features/admin/admin_appearance_spec.rb233
-rw-r--r--spec/features/admin/admin_mode/login_spec.rb342
-rw-r--r--spec/features/admin/users/users_spec.rb4
-rw-r--r--spec/features/admin_variables_spec.rb34
-rw-r--r--spec/features/boards/board_filters_spec.rb4
-rw-r--r--spec/features/boards/boards_spec.rb8
-rw-r--r--spec/features/boards/reload_boards_on_browser_back_spec.rb6
-rw-r--r--spec/features/clusters/create_agent_spec.rb5
-rw-r--r--spec/features/cycle_analytics_spec.rb18
-rw-r--r--spec/features/dashboard/archived_projects_spec.rb2
-rw-r--r--spec/features/dashboard/issuables_counter_spec.rb78
-rw-r--r--spec/features/dashboard/merge_requests_spec.rb4
-rw-r--r--spec/features/error_tracking/user_filters_errors_by_status_spec.rb4
-rw-r--r--spec/features/group_variables_spec.rb10
-rw-r--r--spec/features/groups/crm/contacts/create_spec.rb4
-rw-r--r--spec/features/groups/group_runners_spec.rb74
-rw-r--r--spec/features/groups/issues_spec.rb6
-rw-r--r--spec/features/groups/members/manage_members_spec.rb4
-rw-r--r--spec/features/groups/show_spec.rb45
-rw-r--r--spec/features/groups_spec.rb80
-rw-r--r--spec/features/invites_spec.rb22
-rw-r--r--spec/features/issuables/user_sees_sidebar_spec.rb2
-rw-r--r--spec/features/issues/form_spec.rb7
-rw-r--r--spec/features/issues/gfm_autocomplete_spec.rb2
-rw-r--r--spec/features/issues/incident_issue_spec.rb70
-rw-r--r--spec/features/issues/issue_sidebar_spec.rb8
-rw-r--r--spec/features/issues/related_issues_spec.rb20
-rw-r--r--spec/features/issues/todo_spec.rb2
-rw-r--r--spec/features/issues/user_bulk_edits_issues_spec.rb2
-rw-r--r--spec/features/issues/user_creates_issue_spec.rb8
-rw-r--r--spec/features/issues/user_edits_issue_spec.rb6
-rw-r--r--spec/features/issues/user_interacts_with_awards_spec.rb2
-rw-r--r--spec/features/issues/user_uses_quick_actions_spec.rb2
-rw-r--r--spec/features/markdown/copy_as_gfm_spec.rb19
-rw-r--r--spec/features/markdown/gitlab_flavored_markdown_spec.rb2
-rw-r--r--spec/features/markdown/json_table_spec.rb40
-rw-r--r--spec/features/merge_request/batch_comments_spec.rb10
-rw-r--r--spec/features/merge_request/maintainer_edits_fork_spec.rb6
-rw-r--r--spec/features/merge_request/user_approves_spec.rb2
-rw-r--r--spec/features/merge_request/user_comments_on_merge_request_spec.rb39
-rw-r--r--spec/features/merge_request/user_customizes_merge_commit_message_spec.rb4
-rw-r--r--spec/features/merge_request/user_edits_assignees_sidebar_spec.rb2
-rw-r--r--spec/features/merge_request/user_merges_merge_request_spec.rb21
-rw-r--r--spec/features/merge_request/user_opens_context_commits_modal_spec.rb26
-rw-r--r--spec/features/merge_request/user_posts_diff_notes_spec.rb1
-rw-r--r--spec/features/merge_request/user_sees_closing_issues_message_spec.rb4
-rw-r--r--spec/features/merge_request/user_sees_deployment_widget_spec.rb2
-rw-r--r--spec/features/merge_request/user_sees_diff_spec.rb2
-rw-r--r--spec/features/merge_request/user_sees_merge_request_pipelines_spec.rb2
-rw-r--r--spec/features/merge_request/user_sees_pipelines_spec.rb11
-rw-r--r--spec/features/merge_request/user_sees_versions_spec.rb2
-rw-r--r--spec/features/merge_request/user_uses_quick_actions_spec.rb2
-rw-r--r--spec/features/merge_requests/user_mass_updates_spec.rb14
-rw-r--r--spec/features/oauth_registration_spec.rb39
-rw-r--r--spec/features/populate_new_pipeline_vars_with_params_spec.rb2
-rw-r--r--spec/features/profile_spec.rb2
-rw-r--r--spec/features/profiles/password_spec.rb15
-rw-r--r--spec/features/profiles/personal_access_tokens_spec.rb6
-rw-r--r--spec/features/profiles/user_edit_profile_spec.rb4
-rw-r--r--spec/features/profiles/user_visits_profile_spec.rb20
-rw-r--r--spec/features/projects/blobs/blob_show_spec.rb2
-rw-r--r--spec/features/projects/blobs/edit_spec.rb5
-rw-r--r--spec/features/projects/ci/lint_spec.rb5
-rw-r--r--spec/features/projects/ci/secure_files_spec.rb61
-rw-r--r--spec/features/projects/cluster_agents_spec.rb2
-rw-r--r--spec/features/projects/commits/user_browses_commits_spec.rb2
-rw-r--r--spec/features/projects/compare_spec.rb2
-rw-r--r--spec/features/projects/environments/environment_spec.rb8
-rw-r--r--spec/features/projects/files/project_owner_sees_link_to_create_license_file_in_empty_project_spec.rb2
-rw-r--r--spec/features/projects/files/user_browses_files_spec.rb4
-rw-r--r--spec/features/projects/files/user_creates_files_spec.rb19
-rw-r--r--spec/features/projects/files/user_edits_files_spec.rb26
-rw-r--r--spec/features/projects/issuable_templates_spec.rb28
-rw-r--r--spec/features/projects/jobs/user_browses_jobs_spec.rb1
-rw-r--r--spec/features/projects/members/manage_groups_spec.rb2
-rw-r--r--spec/features/projects/members/manage_members_spec.rb200
-rw-r--r--spec/features/projects/merge_request_button_spec.rb20
-rw-r--r--spec/features/projects/new_project_spec.rb32
-rw-r--r--spec/features/projects/pages/user_adds_domain_spec.rb6
-rw-r--r--spec/features/projects/pages/user_configures_pages_pipeline_spec.rb59
-rw-r--r--spec/features/projects/pages/user_edits_lets_encrypt_settings_spec.rb3
-rw-r--r--spec/features/projects/pages/user_edits_settings_spec.rb13
-rw-r--r--spec/features/projects/pipeline_schedules_spec.rb7
-rw-r--r--spec/features/projects/pipelines/legacy_pipeline_spec.rb31
-rw-r--r--spec/features/projects/pipelines/legacy_pipelines_spec.rb3
-rw-r--r--spec/features/projects/pipelines/pipeline_spec.rb9
-rw-r--r--spec/features/projects/pipelines/pipelines_spec.rb3
-rw-r--r--spec/features/projects/settings/registry_settings_cleanup_tags_spec.rb49
-rw-r--r--spec/features/projects/settings/registry_settings_spec.rb4
-rw-r--r--spec/features/projects/settings/service_desk_setting_spec.rb2
-rw-r--r--spec/features/projects/tags/user_edits_tags_spec.rb74
-rw-r--r--spec/features/projects/tags/user_views_tag_spec.rb8
-rw-r--r--spec/features/projects/tags/user_views_tags_spec.rb30
-rw-r--r--spec/features/projects/tree/create_directory_spec.rb4
-rw-r--r--spec/features/projects/tree/create_file_spec.rb4
-rw-r--r--spec/features/projects/tree/tree_show_spec.rb2
-rw-r--r--spec/features/projects_spec.rb30
-rw-r--r--spec/features/runners_spec.rb6
-rw-r--r--spec/features/search/user_searches_for_commits_spec.rb2
-rw-r--r--spec/features/signed_commits_spec.rb2
-rw-r--r--spec/features/tags/developer_updates_tag_spec.rb56
-rw-r--r--spec/features/task_lists_spec.rb12
-rw-r--r--spec/features/uploads/user_uploads_avatar_to_profile_spec.rb2
-rw-r--r--spec/features/uploads/user_uploads_file_to_note_spec.rb11
-rw-r--r--spec/features/users/email_verification_on_login_spec.rb2
-rw-r--r--spec/features/users/login_spec.rb38
-rw-r--r--spec/features/users/show_spec.rb10
-rw-r--r--spec/features/users/signup_spec.rb465
-rw-r--r--spec/finders/autocomplete/deploy_keys_with_write_access_finder_spec.rb53
-rw-r--r--spec/finders/ci/runners_finder_spec.rb6
-rw-r--r--spec/finders/clusters/knative_services_finder_spec.rb2
-rw-r--r--spec/finders/concerns/finder_with_cross_project_access_spec.rb6
-rw-r--r--spec/finders/crm/contacts_finder_spec.rb61
-rw-r--r--spec/finders/fork_targets_finder_spec.rb40
-rw-r--r--spec/finders/groups/accepting_project_transfers_finder_spec.rb63
-rw-r--r--spec/finders/groups/user_groups_finder_spec.rb112
-rw-r--r--spec/finders/projects/topics_finder_spec.rb28
-rw-r--r--spec/finders/tags_finder_spec.rb10
-rw-r--r--spec/fixtures/api/schemas/entities/discussion.json3
-rw-r--r--spec/fixtures/api/schemas/external_validation.json6
-rw-r--r--spec/fixtures/api/schemas/public_api/v4/notes.json3
-rw-r--r--spec/fixtures/gitlab/import_export/corrupted_project_export.tar.gz  bin 3846 -> 4603 bytes
-rw-r--r--spec/fixtures/lib/gitlab/ci/build/artifacts/adapters/zip_stream/100_files.zip  bin 0 -> 15902 bytes
-rw-r--r--spec/fixtures/lib/gitlab/ci/build/artifacts/adapters/zip_stream/200_mb_decompressed.zip  bin 0 -> 203718 bytes
-rw-r--r--spec/fixtures/lib/gitlab/ci/build/artifacts/adapters/zip_stream/multiple_files.zip  bin 0 -> 332 bytes
-rw-r--r--spec/fixtures/lib/gitlab/ci/build/artifacts/adapters/zip_stream/single_file.zip  bin 0 -> 177 bytes
-rw-r--r--spec/fixtures/lib/gitlab/ci/build/artifacts/adapters/zip_stream/with_directory.zip  bin 0 -> 520 bytes
-rw-r--r--spec/fixtures/lib/gitlab/ci/build/artifacts/adapters/zip_stream/zipbomb.zip  bin 0 -> 1042247 bytes
-rw-r--r--spec/fixtures/lib/gitlab/import_export/complex/project.json71
-rw-r--r--spec/fixtures/lib/gitlab/import_export/complex/tree/project/merge_requests.ndjson4
-rw-r--r--spec/fixtures/markdown.md.erb2
-rw-r--r--spec/fixtures/packages/maven/my-app-1.0-20180724.124855-1.pom.sha11
-rw-r--r--spec/fixtures/whats_new/20201225_01_01.yml6
-rw-r--r--spec/fixtures/whats_new/20201225_01_02.yml6
-rw-r--r--spec/fixtures/whats_new/20201225_01_04.yml18
-rw-r--r--spec/fixtures/whats_new/20201225_01_05.yml12
-rw-r--r--spec/fixtures/whats_new/blank.yml4
-rw-r--r--spec/fixtures/whats_new/invalid.yml16
-rw-r--r--spec/fixtures/whats_new/valid.yml16
-rw-r--r--spec/frontend/__helpers__/mock_apollo_helper.js1
-rw-r--r--spec/frontend/__helpers__/mock_dom_observer.js2
-rw-r--r--spec/frontend/__helpers__/mocks/axios_utils.js2
-rw-r--r--spec/frontend/__helpers__/stub_component.js8
-rw-r--r--spec/frontend/__helpers__/timeout.js59
-rw-r--r--spec/frontend/__helpers__/vue_mount_component_helper.js29
-rw-r--r--spec/frontend/__helpers__/vue_test_utils_helper.js19
-rw-r--r--spec/frontend/__helpers__/vue_test_utils_helper_spec.js57
-rw-r--r--spec/frontend/__helpers__/vuex_action_helper.js6
-rw-r--r--spec/frontend/__helpers__/vuex_action_helper_spec.js2
-rw-r--r--spec/frontend/__helpers__/wait_for_promises.js6
-rw-r--r--spec/frontend/__helpers__/web_worker_transformer.js2
-rw-r--r--spec/frontend/__mocks__/monaco-editor/index.js1
-rw-r--r--spec/frontend/access_tokens/components/__snapshots__/expires_at_field_spec.js.snap17
-rw-r--r--spec/frontend/access_tokens/components/expires_at_field_spec.js9
-rw-r--r--spec/frontend/access_tokens/components/projects_field_spec.js131
-rw-r--r--spec/frontend/access_tokens/components/projects_token_selector_spec.js266
-rw-r--r--spec/frontend/access_tokens/index_spec.js45
-rw-r--r--spec/frontend/add_context_commits_modal/components/add_context_commits_modal_spec.js4
-rw-r--r--spec/frontend/admin/analytics/devops_score/components/devops_score_spec.js2
-rw-r--r--spec/frontend/admin/signup_restrictions/components/signup_checkbox_spec.js2
-rw-r--r--spec/frontend/admin/signup_restrictions/components/signup_form_spec.js4
-rw-r--r--spec/frontend/admin/statistics_panel/components/app_spec.js2
-rw-r--r--spec/frontend/admin/users/components/actions/actions_spec.js2
-rw-r--r--spec/frontend/admin/users/components/app_spec.js2
-rw-r--r--spec/frontend/admin/users/components/modals/delete_user_modal_spec.js14
-rw-r--r--spec/frontend/admin/users/components/user_actions_spec.js4
-rw-r--r--spec/frontend/admin/users/components/user_avatar_spec.js6
-rw-r--r--spec/frontend/admin/users/components/users_table_spec.js8
-rw-r--r--spec/frontend/admin/users/index_spec.js4
-rw-r--r--spec/frontend/api/groups_api_spec.js46
-rw-r--r--spec/frontend/attention_requests/components/navigation_popover_spec.js88
-rw-r--r--spec/frontend/batch_comments/components/review_bar_spec.js51
-rw-r--r--spec/frontend/behaviors/components/json_table_spec.js162
-rw-r--r--spec/frontend/behaviors/gl_emoji_spec.js2
-rw-r--r--spec/frontend/behaviors/markdown/render_json_table_spec.js119
-rw-r--r--spec/frontend/blob/3d_viewer/mesh_object_spec.js4
-rw-r--r--spec/frontend/blob/blob_links_tracking_spec.js60
-rw-r--r--spec/frontend/blob/components/blob_content_spec.js22
-rw-r--r--spec/frontend/blob/components/blob_edit_content_spec.js2
-rw-r--r--spec/frontend/blob/components/blob_edit_header_spec.js6
-rw-r--r--spec/frontend/blob/components/blob_header_default_actions_spec.js8
-rw-r--r--spec/frontend/blob/components/blob_header_filepath_spec.js4
-rw-r--r--spec/frontend/blob/components/blob_header_spec.js14
-rw-r--r--spec/frontend/blob/components/blob_header_viewer_switcher_spec.js6
-rw-r--r--spec/frontend/blob/notebook/notebook_viever_spec.js8
-rw-r--r--spec/frontend/blob/pdf/pdf_viewer_spec.js6
-rw-r--r--spec/frontend/blob/pipeline_tour_success_modal_spec.js16
-rw-r--r--spec/frontend/blob/sketch/index_spec.js2
-rw-r--r--spec/frontend/blob/suggest_gitlab_ci_yml/components/popover_spec.js2
-rw-r--r--spec/frontend/boards/board_card_inner_spec.js6
-rw-r--r--spec/frontend/boards/board_list_spec.js2
-rw-r--r--spec/frontend/boards/components/board_add_new_column_form_spec.js30
-rw-r--r--spec/frontend/boards/components/board_add_new_column_trigger_spec.js4
-rw-r--r--spec/frontend/boards/components/board_blocked_icon_spec.js8
-rw-r--r--spec/frontend/boards/components/board_card_spec.js2
-rw-r--r--spec/frontend/boards/components/board_content_sidebar_spec.js2
-rw-r--r--spec/frontend/boards/components/board_content_spec.js12
-rw-r--r--spec/frontend/boards/components/board_list_header_spec.js2
-rw-r--r--spec/frontend/boards/components/board_new_item_spec.js8
-rw-r--r--spec/frontend/boards/components/board_settings_sidebar_spec.js16
-rw-r--r--spec/frontend/boards/components/boards_selector_spec.js2
-rw-r--r--spec/frontend/boards/components/new_board_button_spec.js6
-rw-r--r--spec/frontend/boards/components/sidebar/board_editable_item_spec.js2
-rw-r--r--spec/frontend/boards/components/sidebar/board_sidebar_time_tracker_spec.js2
-rw-r--r--spec/frontend/boards/components/sidebar/board_sidebar_title_spec.js8
-rw-r--r--spec/frontend/boards/project_select_spec.js9
-rw-r--r--spec/frontend/boards/stores/mutations_spec.js15
-rw-r--r--spec/frontend/captcha/wait_for_captcha_to_be_solved_spec.js5
-rw-r--r--spec/frontend/ci_variable_list/components/ci_admin_variables_spec.js178
-rw-r--r--spec/frontend/ci_variable_list/components/ci_environments_dropdown_spec.js139
-rw-r--r--spec/frontend/ci_variable_list/components/ci_group_variables_spec.js183
-rw-r--r--spec/frontend/ci_variable_list/components/ci_variable_modal_spec.js383
-rw-r--r--spec/frontend/ci_variable_list/components/ci_variable_settings_spec.js128
-rw-r--r--spec/frontend/ci_variable_list/components/ci_variable_table_spec.js98
-rw-r--r--spec/frontend/ci_variable_list/components/legacy_ci_variable_modal_spec.js8
-rw-r--r--spec/frontend/ci_variable_list/mocks.js109
-rw-r--r--spec/frontend/ci_variable_list/utils_spec.js78
-rw-r--r--spec/frontend/clusters/agents/components/activity_history_item_spec.js2
-rw-r--r--spec/frontend/clusters/agents/components/create_token_modal_spec.js2
-rw-r--r--spec/frontend/clusters/agents/components/token_table_spec.js4
-rw-r--r--spec/frontend/clusters/clusters_bundle_spec.js45
-rw-r--r--spec/frontend/clusters/components/new_cluster_spec.js4
-rw-r--r--spec/frontend/clusters/forms/components/integration_form_spec.js4
-rw-r--r--spec/frontend/clusters_list/components/install_agent_modal_spec.js1
-rw-r--r--spec/frontend/commit/pipelines/pipelines_table_spec.js39
-rw-r--r--spec/frontend/content_editor/components/bubble_menus/link_spec.js2
-rw-r--r--spec/frontend/content_editor/components/bubble_menus/media_spec.js2
-rw-r--r--spec/frontend/content_editor/components/content_editor_spec.js14
-rw-r--r--spec/frontend/content_editor/components/toolbar_more_dropdown_spec.js21
-rw-r--r--spec/frontend/content_editor/components/top_toolbar_spec.js2
-rw-r--r--spec/frontend/content_editor/components/wrappers/__snapshots__/table_of_contents_spec.js.snap115
-rw-r--r--spec/frontend/content_editor/components/wrappers/table_cell_base_spec.js4
-rw-r--r--spec/frontend/content_editor/components/wrappers/table_of_contents_spec.js84
-rw-r--r--spec/frontend/content_editor/extensions/image_spec.js2
-rw-r--r--spec/frontend/content_editor/markdown_processing_spec_helper.js4
-rw-r--r--spec/frontend/content_editor/remark_markdown_processing_spec.js238
-rw-r--r--spec/frontend/content_editor/render_html_and_json_for_all_examples.js4
-rw-r--r--spec/frontend/content_editor/services/markdown_serializer_spec.js144
-rw-r--r--spec/frontend/content_editor/services/table_of_contents_utils_spec.js96
-rw-r--r--spec/frontend/crm/contact_form_wrapper_spec.js9
-rw-r--r--spec/frontend/crm/contacts_root_spec.js56
-rw-r--r--spec/frontend/crm/form_spec.js38
-rw-r--r--spec/frontend/crm/mock_data.js32
-rw-r--r--spec/frontend/crm/organization_form_wrapper_spec.js2
-rw-r--r--spec/frontend/cycle_analytics/base_spec.js20
-rw-r--r--spec/frontend/cycle_analytics/mock_data.js18
-rw-r--r--spec/frontend/cycle_analytics/store/actions_spec.js56
-rw-r--r--spec/frontend/cycle_analytics/store/mutations_spec.js43
-rw-r--r--spec/frontend/design_management/components/delete_button_spec.js4
-rw-r--r--spec/frontend/design_management/components/design_notes/design_discussion_spec.js14
-rw-r--r--spec/frontend/design_management/components/design_notes/design_note_spec.js2
-rw-r--r--spec/frontend/design_management/components/design_notes/design_reply_form_spec.js18
-rw-r--r--spec/frontend/design_management/components/design_notes/toggle_replies_widget_spec.js8
-rw-r--r--spec/frontend/design_management/components/design_scaler_spec.js2
-rw-r--r--spec/frontend/design_management/components/design_sidebar_spec.js8
-rw-r--r--spec/frontend/design_management/components/design_todo_button_spec.js2
-rw-r--r--spec/frontend/design_management/components/image_spec.js2
-rw-r--r--spec/frontend/design_management/components/list/item_spec.js10
-rw-r--r--spec/frontend/design_management/components/toolbar/index_spec.js10
-rw-r--r--spec/frontend/design_management/components/upload/button_spec.js2
-rw-r--r--spec/frontend/design_management/components/upload/design_version_dropdown_spec.js12
-rw-r--r--spec/frontend/design_management/pages/design/index_spec.js12
-rw-r--r--spec/frontend/design_management/pages/index_spec.js14
-rw-r--r--spec/frontend/design_management/router_spec.js4
-rw-r--r--spec/frontend/diffs/components/diff_file_header_spec.js2
-rw-r--r--spec/frontend/diffs/components/diff_row_spec.js10
-rw-r--r--spec/frontend/diffs/store/utils_spec.js8
-rw-r--r--spec/frontend/dropzone_input_spec.js4
-rw-r--r--spec/frontend/editor/schema/ci/ci_schema_spec.js10
-rw-r--r--spec/frontend/editor/schema/ci/yaml_tests/negative_tests/artifacts.yml18
-rw-r--r--spec/frontend/editor/schema/ci/yaml_tests/negative_tests/cache.yml8
-rw-r--r--spec/frontend/editor/schema/ci/yaml_tests/negative_tests/include.yml8
-rw-r--r--spec/frontend/editor/schema/ci/yaml_tests/negative_tests/rules.yml14
-rw-r--r--spec/frontend/editor/schema/ci/yaml_tests/positive_tests/artifacts.yml25
-rw-r--r--spec/frontend/editor/schema/ci/yaml_tests/positive_tests/cache.yml3
-rw-r--r--spec/frontend/editor/schema/ci/yaml_tests/positive_tests/filter.yml10
-rw-r--r--spec/frontend/editor/schema/ci/yaml_tests/positive_tests/include.yml15
-rw-r--r--spec/frontend/editor/schema/ci/yaml_tests/positive_tests/rules.yml23
-rw-r--r--spec/frontend/editor/source_editor_instance_spec.js2
-rw-r--r--spec/frontend/editor/source_editor_spec.js7
-rw-r--r--spec/frontend/environment.js1
-rw-r--r--spec/frontend/environments/canary_ingress_spec.js10
-rw-r--r--spec/frontend/environments/canary_update_modal_spec.js4
-rw-r--r--spec/frontend/environments/confirm_rollback_modal_spec.js14
-rw-r--r--spec/frontend/environments/deploy_board_component_spec.js8
-rw-r--r--spec/frontend/environments/edit_environment_spec.js2
-rw-r--r--spec/frontend/environments/environment_actions_spec.js12
-rw-r--r--spec/frontend/environments/environment_delete_spec.js2
-rw-r--r--spec/frontend/environments/environment_item_spec.js12
-rw-r--r--spec/frontend/environments/environment_pin_spec.js4
-rw-r--r--spec/frontend/environments/environment_rollback_spec.js4
-rw-r--r--spec/frontend/environments/environment_stop_spec.js2
-rw-r--r--spec/frontend/environments/environment_table_spec.js4
-rw-r--r--spec/frontend/environments/environments_detail_header_spec.js12
-rw-r--r--spec/frontend/environments/folder/environments_folder_view_spec.js6
-rw-r--r--spec/frontend/environments/new_environment_spec.js2
-rw-r--r--spec/frontend/error_tracking/components/error_details_spec.js44
-rw-r--r--spec/frontend/error_tracking/components/error_tracking_actions_spec.js2
-rw-r--r--spec/frontend/error_tracking/components/error_tracking_list_spec.js24
-rw-r--r--spec/frontend/error_tracking/components/stacktrace_entry_spec.js10
-rw-r--r--spec/frontend/error_tracking/components/stacktrace_spec.js4
-rw-r--r--spec/frontend/error_tracking_settings/components/project_dropdown_spec.js24
-rw-r--r--spec/frontend/feature_flags/components/configure_feature_flags_modal_spec.js4
-rw-r--r--spec/frontend/feature_flags/components/empty_state_spec.js14
-rw-r--r--spec/frontend/feature_flags/components/environments_dropdown_spec.js10
-rw-r--r--spec/frontend/feature_flags/components/feature_flags_table_spec.js4
-rw-r--r--spec/frontend/feature_flags/components/form_spec.js18
-rw-r--r--spec/frontend/feature_flags/components/new_environments_dropdown_spec.js24
-rw-r--r--spec/frontend/feature_flags/components/new_feature_flag_spec.js6
-rw-r--r--spec/frontend/feature_flags/components/strategies/flexible_rollout_spec.js12
-rw-r--r--spec/frontend/feature_flags/components/strategies/gitlab_user_list_spec.js8
-rw-r--r--spec/frontend/feature_flags/components/strategies/parameter_form_group_spec.js2
-rw-r--r--spec/frontend/feature_flags/components/strategies/percent_rollout_spec.js12
-rw-r--r--spec/frontend/feature_flags/components/strategies/users_with_id_spec.js2
-rw-r--r--spec/frontend/feature_flags/components/strategy_parameters_spec.js6
-rw-r--r--spec/frontend/feature_flags/components/strategy_spec.js40
-rw-r--r--spec/frontend/fixtures/integrations.rb2
-rw-r--r--spec/frontend/fixtures/issues.rb40
-rw-r--r--spec/frontend/fixtures/namespaces.rb46
-rw-r--r--spec/frontend/fixtures/prometheus_integration.rb2
-rw-r--r--spec/frontend/fixtures/runner.rb44
-rw-r--r--spec/frontend/frequent_items/components/app_spec.js13
-rw-r--r--spec/frontend/gfm_auto_complete/mock_data.js34
-rw-r--r--spec/frontend/gfm_auto_complete_spec.js48
-rw-r--r--spec/frontend/group_settings/components/shared_runners_form_spec.js29
-rw-r--r--spec/frontend/groups/components/app_spec.js24
-rw-r--r--spec/frontend/groups/components/group_item_spec.js49
-rw-r--r--spec/frontend/groups/components/group_name_and_path_spec.js75
-rw-r--r--spec/frontend/groups/components/groups_spec.js4
-rw-r--r--spec/frontend/groups/components/transfer_group_form_spec.js1
-rw-r--r--spec/frontend/header_search/components/app_spec.js72
-rw-r--r--spec/frontend/helpers/diffs_helper_spec.js32
-rw-r--r--spec/frontend/ide/components/activity_bar_spec.js72
-rw-r--r--spec/frontend/ide/components/branches/item_spec.js6
-rw-r--r--spec/frontend/ide/components/branches/search_list_spec.js6
-rw-r--r--spec/frontend/ide/components/commit_sidebar/editor_header_spec.js4
-rw-r--r--spec/frontend/ide/components/commit_sidebar/form_spec.js6
-rw-r--r--spec/frontend/ide/components/error_message_spec.js4
-rw-r--r--spec/frontend/ide/components/file_templates/dropdown_spec.js2
-rw-r--r--spec/frontend/ide/components/ide_file_row_spec.js4
-rw-r--r--spec/frontend/ide/components/ide_project_header_spec.js2
-rw-r--r--spec/frontend/ide/components/ide_review_spec.js4
-rw-r--r--spec/frontend/ide/components/ide_side_bar_spec.js34
-rw-r--r--spec/frontend/ide/components/ide_sidebar_nav_spec.js6
-rw-r--r--spec/frontend/ide/components/ide_spec.js2
-rw-r--r--spec/frontend/ide/components/ide_status_bar_spec.js124
-rw-r--r--spec/frontend/ide/components/ide_status_list_spec.js4
-rw-r--r--spec/frontend/ide/components/ide_status_mr_spec.js4
-rw-r--r--spec/frontend/ide/components/ide_tree_spec.js2
-rw-r--r--spec/frontend/ide/components/jobs/detail/scroll_button_spec.js2
-rw-r--r--spec/frontend/ide/components/jobs/list_spec.js10
-rw-r--r--spec/frontend/ide/components/jobs/stage_spec.js8
-rw-r--r--spec/frontend/ide/components/merge_requests/list_spec.js8
-rw-r--r--spec/frontend/ide/components/new_dropdown/index_spec.js82
-rw-r--r--spec/frontend/ide/components/panes/collapsible_sidebar_spec.js2
-rw-r--r--spec/frontend/ide/components/panes/right_spec.js8
-rw-r--r--spec/frontend/ide/components/pipelines/empty_state_spec.js2
-rw-r--r--spec/frontend/ide/components/pipelines/list_spec.js12
-rw-r--r--spec/frontend/ide/components/preview/clientside_spec.js2
-rw-r--r--spec/frontend/ide/components/preview/navigator_spec.js4
-rw-r--r--spec/frontend/ide/components/repo_commit_section_spec.js12
-rw-r--r--spec/frontend/ide/components/repo_editor_spec.js5
-rw-r--r--spec/frontend/ide/components/repo_tab_spec.js6
-rw-r--r--spec/frontend/ide/components/resizable_panel_spec.js2
-rw-r--r--spec/frontend/ide/components/shared/commit_message_field_spec.js2
-rw-r--r--spec/frontend/ide/components/terminal/empty_state_spec.js10
-rw-r--r--spec/frontend/ide/components/terminal/session_spec.js4
-rw-r--r--spec/frontend/ide/components/terminal/terminal_controls_spec.js2
-rw-r--r--spec/frontend/ide/components/terminal/terminal_spec.js16
-rw-r--r--spec/frontend/ide/components/terminal/view_spec.js8
-rw-r--r--spec/frontend/ide/components/terminal_sync/terminal_sync_status_safe_spec.js4
-rw-r--r--spec/frontend/ide/components/terminal_sync/terminal_sync_status_spec.js8
-rw-r--r--spec/frontend/ide/lib/common/model_manager_spec.js4
-rw-r--r--spec/frontend/ide/lib/diff/diff_spec.js8
-rw-r--r--spec/frontend/ide/stores/actions/file_spec.js8
-rw-r--r--spec/frontend/ide/stores/actions/tree_spec.js2
-rw-r--r--spec/frontend/ide/stores/getters_spec.js6
-rw-r--r--spec/frontend/ide/stores/modules/commit/getters_spec.js38
-rw-r--r--spec/frontend/ide/stores/mutations/file_spec.js20
-rw-r--r--spec/frontend/ide/stores/mutations/merge_request_spec.js2
-rw-r--r--spec/frontend/ide/utils_spec.js6
-rw-r--r--spec/frontend/integrations/edit/components/dynamic_field_spec.js2
-rw-r--r--spec/frontend/integrations/overrides/components/integration_overrides_spec.js5
-rw-r--r--spec/frontend/invite_members/components/invite_members_modal_spec.js44
-rw-r--r--spec/frontend/invite_members/components/members_token_select_spec.js34
-rw-r--r--spec/frontend/invite_members/components/user_limit_notification_spec.js21
-rw-r--r--spec/frontend/invite_members/mock_data/api_responses.js15
-rw-r--r--spec/frontend/invite_members/mock_data/member_modal.js5
-rw-r--r--spec/frontend/issuable/components/related_issuable_item_spec.js233
-rw-r--r--spec/frontend/issuable/popover/components/issue_popover_spec.js62
-rw-r--r--spec/frontend/issuable/related_issues/components/add_issuable_form_spec.js4
-rw-r--r--spec/frontend/issuable/related_issues/components/related_issues_block_spec.js71
-rw-r--r--spec/frontend/issuable/related_issues/components/related_issues_root_spec.js367
-rw-r--r--spec/frontend/issues/list/components/issues_list_app_spec.js87
-rw-r--r--spec/frontend/issues/list/mock_data.js1
-rw-r--r--spec/frontend/issues/show/components/app_spec.js4
-rw-r--r--spec/frontend/issues/show/components/description_spec.js14
-rw-r--r--spec/frontend/issues/show/components/edit_actions_spec.js2
-rw-r--r--spec/frontend/issues/show/components/incidents/create_timeline_events_form_spec.js189
-rw-r--r--spec/frontend/issues/show/components/incidents/mock_data.js28
-rw-r--r--spec/frontend/issues/show/components/incidents/timeline_events_form_spec.js117
-rw-r--r--spec/frontend/issues/show/components/incidents/timeline_events_item_spec.js (renamed from spec/frontend/issues/show/components/incidents/timeline_events_list_item_spec.js)4
-rw-r--r--spec/frontend/issues/show/components/incidents/timeline_events_list_spec.js2
-rw-r--r--spec/frontend/issues/show/components/incidents/timeline_events_tab_spec.js14
-rw-r--r--spec/frontend/issues/show/components/incidents/utils_spec.js2
-rw-r--r--spec/frontend/jira_connect/branches/components/project_dropdown_spec.js2
-rw-r--r--spec/frontend/jira_connect/subscriptions/components/add_namespace_button_spec.js1
-rw-r--r--spec/frontend/jira_connect/subscriptions/components/sign_in_oauth_button_spec.js6
-rw-r--r--spec/frontend/jira_connect/subscriptions/pages/sign_in/sign_in_page_spec.js2
-rw-r--r--spec/frontend/jobs/components/job_log_controllers_spec.js148
-rw-r--r--spec/frontend/jobs/components/sidebar_detail_row_spec.js2
-rw-r--r--spec/frontend/labels/labels_select_spec.js6
-rw-r--r--spec/frontend/lib/dompurify_spec.js46
-rw-r--r--spec/frontend/lib/gfm/index_spec.js156
-rw-r--r--spec/frontend/lib/utils/common_utils_spec.js23
-rw-r--r--spec/frontend/lib/utils/confirm_via_gl_modal/confirm_modal_spec.js4
-rw-r--r--spec/frontend/lib/utils/rails_ujs_spec.js12
-rw-r--r--spec/frontend/lib/utils/recurrence_spec.js5
-rw-r--r--spec/frontend/lib/utils/sticky_spec.js6
-rw-r--r--spec/frontend/lib/utils/text_markdown_spec.js136
-rw-r--r--spec/frontend/lib/utils/url_utility_spec.js66
-rw-r--r--spec/frontend/members/components/action_buttons/access_request_action_buttons_spec.js4
-rw-r--r--spec/frontend/members/components/action_buttons/approve_access_request_button_spec.js4
-rw-r--r--spec/frontend/members/components/action_buttons/invite_action_buttons_spec.js4
-rw-r--r--spec/frontend/members/components/action_buttons/leave_button_spec.js4
-rw-r--r--spec/frontend/members/components/action_buttons/remove_group_link_button_spec.js2
-rw-r--r--spec/frontend/members/components/action_buttons/resend_invite_button_spec.js2
-rw-r--r--spec/frontend/members/components/action_buttons/user_action_buttons_spec.js4
-rw-r--r--spec/frontend/members/components/app_spec.js4
-rw-r--r--spec/frontend/members/components/avatars/group_avatar_spec.js2
-rw-r--r--spec/frontend/members/components/avatars/user_avatar_spec.js4
-rw-r--r--spec/frontend/members/components/filter_sort/filter_sort_container_spec.js4
-rw-r--r--spec/frontend/members/components/filter_sort/members_filtered_search_bar_spec.js2
-rw-r--r--spec/frontend/members/components/filter_sort/sort_dropdown_spec.js4
-rw-r--r--spec/frontend/members/components/modals/remove_group_link_modal_spec.js4
-rw-r--r--spec/frontend/members/components/modals/remove_member_modal_spec.js2
-rw-r--r--spec/frontend/members/components/table/created_at_spec.js2
-rw-r--r--spec/frontend/members/components/table/expiration_datepicker_spec.js2
-rw-r--r--spec/frontend/members/components/table/member_action_buttons_spec.js2
-rw-r--r--spec/frontend/members/components/table/member_avatar_spec.js2
-rw-r--r--spec/frontend/members/components/table/members_table_cell_spec.js2
-rw-r--r--spec/frontend/members/components/table/members_table_spec.js20
-rw-r--r--spec/frontend/members/components/table/role_dropdown_spec.js4
-rw-r--r--spec/frontend/members/index_spec.js2
-rw-r--r--spec/frontend/monitoring/components/charts/anomaly_spec.js2
-rw-r--r--spec/frontend/monitoring/components/charts/bar_spec.js2
-rw-r--r--spec/frontend/monitoring/components/charts/column_spec.js2
-rw-r--r--spec/frontend/monitoring/components/charts/gauge_spec.js2
-rw-r--r--spec/frontend/monitoring/components/charts/heatmap_spec.js2
-rw-r--r--spec/frontend/monitoring/components/charts/single_stat_spec.js2
-rw-r--r--spec/frontend/monitoring/components/charts/stacked_column_spec.js4
-rw-r--r--spec/frontend/monitoring/components/charts/time_series_spec.js29
-rw-r--r--spec/frontend/monitoring/components/dashboard_actions_menu_spec.js4
-rw-r--r--spec/frontend/monitoring/components/dashboard_header_spec.js19
-rw-r--r--spec/frontend/monitoring/components/dashboard_panel_builder_spec.js22
-rw-r--r--spec/frontend/monitoring/components/dashboard_panel_spec.js43
-rw-r--r--spec/frontend/monitoring/components/dashboard_spec.js36
-rw-r--r--spec/frontend/monitoring/components/dashboard_url_time_spec.js3
-rw-r--r--spec/frontend/monitoring/components/dashboards_dropdown_spec.js14
-rw-r--r--spec/frontend/monitoring/components/duplicate_dashboard_form_spec.js2
-rw-r--r--spec/frontend/monitoring/components/duplicate_dashboard_modal_spec.js12
-rw-r--r--spec/frontend/monitoring/components/embeds/embed_group_spec.js20
-rw-r--r--spec/frontend/monitoring/components/embeds/metric_embed_spec.js8
-rw-r--r--spec/frontend/monitoring/components/empty_state_spec.js4
-rw-r--r--spec/frontend/monitoring/components/graph_group_spec.js8
-rw-r--r--spec/frontend/monitoring/components/group_empty_state_spec.js2
-rw-r--r--spec/frontend/monitoring/components/links_section_spec.js2
-rw-r--r--spec/frontend/monitoring/components/refresh_button_spec.js6
-rw-r--r--spec/frontend/monitoring/components/variables/dropdown_field_spec.js4
-rw-r--r--spec/frontend/monitoring/components/variables_section_spec.js4
-rw-r--r--spec/frontend/monitoring/pages/panel_new_page_spec.js4
-rw-r--r--spec/frontend/monitoring/router_spec.js10
-rw-r--r--spec/frontend/monitoring/store/utils_spec.js8
-rw-r--r--spec/frontend/nav/components/top_nav_dropdown_menu_spec.js2
-rw-r--r--spec/frontend/notes/components/comment_field_layout_spec.js4
-rw-r--r--spec/frontend/notes/components/diff_discussion_header_spec.js29
-rw-r--r--spec/frontend/notes/components/discussion_actions_spec.js22
-rw-r--r--spec/frontend/notes/components/discussion_counter_spec.js8
-rw-r--r--spec/frontend/notes/components/discussion_filter_note_spec.js4
-rw-r--r--spec/frontend/notes/components/discussion_notes_spec.js4
-rw-r--r--spec/frontend/notes/components/discussion_reply_placeholder_spec.js2
-rw-r--r--spec/frontend/notes/components/discussion_resolve_button_spec.js8
-rw-r--r--spec/frontend/notes/components/discussion_resolve_with_issue_button_spec.js2
-rw-r--r--spec/frontend/notes/components/note_actions/reply_button_spec.js2
-rw-r--r--spec/frontend/notes/components/note_actions_spec.js37
-rw-r--r--spec/frontend/notes/components/note_attachment_spec.js4
-rw-r--r--spec/frontend/notes/components/note_body_spec.js21
-rw-r--r--spec/frontend/notes/components/note_form_spec.js20
-rw-r--r--spec/frontend/notes/components/note_header_spec.js22
-rw-r--r--spec/frontend/notes/components/noteable_discussion_spec.js14
-rw-r--r--spec/frontend/notes/components/noteable_note_spec.js18
-rw-r--r--spec/frontend/notes/components/notes_app_spec.js7
-rw-r--r--spec/frontend/notes/components/sort_discussion_spec.js2
-rw-r--r--spec/frontend/notes/components/timeline_toggle_spec.js2
-rw-r--r--spec/frontend/notes/deprecated_notes_spec.js22
-rw-r--r--spec/frontend/operation_settings/components/metrics_settings_spec.js11
-rw-r--r--spec/frontend/packages_and_registries/container_registry/explorer/components/details_page/tags_list_spec.js5
-rw-r--r--spec/frontend/packages_and_registries/container_registry/explorer/components/list_page/cleanup_status_spec.js14
-rw-r--r--spec/frontend/packages_and_registries/container_registry/explorer/mock_data.js2
-rw-r--r--spec/frontend/packages_and_registries/dependency_proxy/app_spec.js18
-rw-r--r--spec/frontend/packages_and_registries/infrastructure_registry/components/shared/package_list_row_spec.js2
-rw-r--r--spec/frontend/packages_and_registries/package_registry/components/details/__snapshots__/package_title_spec.js.snap29
-rw-r--r--spec/frontend/packages_and_registries/package_registry/components/details/__snapshots__/pypi_installation_spec.js.snap196
-rw-r--r--spec/frontend/packages_and_registries/package_registry/components/details/package_files_spec.js192
-rw-r--r--spec/frontend/packages_and_registries/package_registry/components/details/package_history_spec.js32
-rw-r--r--spec/frontend/packages_and_registries/package_registry/components/details/package_title_spec.js20
-rw-r--r--spec/frontend/packages_and_registries/package_registry/components/details/pypi_installation_spec.js16
-rw-r--r--spec/frontend/packages_and_registries/package_registry/components/list/package_list_row_spec.js2
-rw-r--r--spec/frontend/packages_and_registries/package_registry/mock_data.js42
-rw-r--r--spec/frontend/packages_and_registries/package_registry/pages/details_spec.js187
-rw-r--r--spec/frontend/packages_and_registries/settings/project/settings/components/__snapshots__/container_expiration_policy_form_spec.js.snap3
-rw-r--r--spec/frontend/packages_and_registries/settings/project/settings/mock_data.js4
-rw-r--r--spec/frontend/pages/admin/application_settings/account_and_limits_spec.js8
-rw-r--r--spec/frontend/pages/groups/new/components/app_spec.js39
-rw-r--r--spec/frontend/pages/groups/new/components/create_group_description_details_spec.js57
-rw-r--r--spec/frontend/pages/projects/graphs/__snapshots__/code_coverage_spec.js.snap28
-rw-r--r--spec/frontend/pages/projects/graphs/code_coverage_spec.js21
-rw-r--r--spec/frontend/pages/projects/pipeline_schedules/shared/components/timezone_dropdown_spec.js1
-rw-r--r--spec/frontend/pages/projects/shared/permissions/components/settings_panel_spec.js20
-rw-r--r--spec/frontend/pages/shared/wikis/components/wiki_form_spec.js64
-rw-r--r--spec/frontend/pipeline_editor/components/drawer/cards/first_pipeline_card_spec.js12
-rw-r--r--spec/frontend/pipeline_editor/components/lint/ci_lint_spec.js72
-rw-r--r--spec/frontend/pipeline_editor/components/pipeline_editor_tabs_spec.js187
-rw-r--r--spec/frontend/pipeline_editor/components/validate/ci_validate_spec.js39
-rw-r--r--spec/frontend/pipeline_editor/pipeline_editor_home_spec.js6
-rw-r--r--spec/frontend/pipeline_new/components/pipeline_new_form_spec.js4
-rw-r--r--spec/frontend/pipeline_schedules/components/take_ownership_modal_spec.js54
-rw-r--r--spec/frontend/pipeline_wizard/components/editor_spec.js4
-rw-r--r--spec/frontend/pipeline_wizard/components/step_spec.js4
-rw-r--r--spec/frontend/pipeline_wizard/components/widgets/checklist_spec.js13
-rw-r--r--spec/frontend/pipelines/components/pipeline_tabs_spec.js4
-rw-r--r--spec/frontend/pipelines/components/pipelines_filtered_search_spec.js2
-rw-r--r--spec/frontend/pipelines/components/pipelines_list/pipeline_stage_spec.js59
-rw-r--r--spec/frontend/pipelines/graph/linked_pipeline_spec.js2
-rw-r--r--spec/frontend/pipelines/performance_insights_modal_spec.js25
-rw-r--r--spec/frontend/pipelines/pipeline_multi_actions_spec.js13
-rw-r--r--spec/frontend/pipelines/pipeline_url_spec.js4
-rw-r--r--spec/frontend/pipelines/pipelines_spec.js11
-rw-r--r--spec/frontend/pipelines/test_reports/test_reports_spec.js4
-rw-r--r--spec/frontend/projects/commits/components/author_select_spec.js10
-rw-r--r--spec/frontend/projects/compare/components/app_spec.js3
-rw-r--r--spec/frontend/projects/compare/components/mock_data.js25
-rw-r--r--spec/frontend/projects/project_new_spec.js67
-rw-r--r--spec/frontend/projects/settings/branch_rules/branch_dropdown_spec.js17
-rw-r--r--spec/frontend/projects/settings/branch_rules/components/protections/index_spec.js57
-rw-r--r--spec/frontend/projects/settings/branch_rules/components/protections/merge_protections_spec.js53
-rw-r--r--spec/frontend/projects/settings/branch_rules/components/protections/push_protections_spec.js50
-rw-r--r--spec/frontend/projects/settings/branch_rules/mock_data.js10
-rw-r--r--spec/frontend/projects/settings/branch_rules/rule_edit_spec.js61
-rw-r--r--spec/frontend/projects/settings/components/transfer_project_form_spec.js164
-rw-r--r--spec/frontend/prometheus_metrics/custom_metrics_spec.js48
-rw-r--r--spec/frontend/prometheus_metrics/prometheus_metrics_spec.js34
-rw-r--r--spec/frontend/releases/__snapshots__/util_spec.js.snap10
-rw-r--r--spec/frontend/releases/components/release_block_header_spec.js39
-rw-r--r--spec/frontend/reports/components/report_section_spec.js212
-rw-r--r--spec/frontend/repository/components/blob_content_viewer_spec.js93
-rw-r--r--spec/frontend/repository/mock_data.js1
-rw-r--r--spec/frontend/right_sidebar_spec.js2
-rw-r--r--spec/frontend/runner/admin_runner_edit/admin_runner_edit_app_spec.js2
-rw-r--r--spec/frontend/runner/admin_runner_show/admin_runner_show_app_spec.js13
-rw-r--r--spec/frontend/runner/admin_runners/admin_runners_app_spec.js214
-rw-r--r--spec/frontend/runner/components/cells/runner_summary_cell_spec.js13
-rw-r--r--spec/frontend/runner/components/registration/registration_token_spec.js27
-rw-r--r--spec/frontend/runner/components/runner_assigned_item_spec.js16
-rw-r--r--spec/frontend/runner/components/runner_bulk_delete_checkbox_spec.js101
-rw-r--r--spec/frontend/runner/components/runner_bulk_delete_spec.js176
-rw-r--r--spec/frontend/runner/components/runner_filtered_search_bar_spec.js6
-rw-r--r--spec/frontend/runner/components/runner_jobs_spec.js3
-rw-r--r--spec/frontend/runner/components/runner_list_spec.js8
-rw-r--r--spec/frontend/runner/components/runner_pagination_spec.js138
-rw-r--r--spec/frontend/runner/components/runner_projects_spec.js1
-rw-r--r--spec/frontend/runner/components/stat/runner_count_spec.js4
-rw-r--r--spec/frontend/runner/components/stat/runner_single_stat_spec.js61
-rw-r--r--spec/frontend/runner/components/stat/runner_stats_spec.js37
-rw-r--r--spec/frontend/runner/components/stat/runner_status_stat_spec.js67
-rw-r--r--spec/frontend/runner/graphql/local_state_spec.js62
-rw-r--r--spec/frontend/runner/group_runner_show/group_runner_show_app_spec.js9
-rw-r--r--spec/frontend/runner/group_runners/group_runners_app_spec.js56
-rw-r--r--spec/frontend/runner/mock_data.js43
-rw-r--r--spec/frontend/runner/runner_search_utils_spec.js40
-rw-r--r--spec/frontend/security_configuration/components/training_provider_list_spec.js2
-rw-r--r--spec/frontend/sidebar/assignees_spec.js2
-rw-r--r--spec/frontend/sidebar/components/assignees/sidebar_assignees_widget_spec.js1
-rw-r--r--spec/frontend/sidebar/components/attention_requested_toggle_spec.js121
-rw-r--r--spec/frontend/sidebar/components/confidential/sidebar_confidentiality_form_spec.js25
-rw-r--r--spec/frontend/sidebar/components/confidential/sidebar_confidentiality_widget_spec.js5
-rw-r--r--spec/frontend/sidebar/components/incidents/escalation_status_spec.js5
-rw-r--r--spec/frontend/sidebar/components/participants/sidebar_participants_widget_spec.js5
-rw-r--r--spec/frontend/sidebar/components/reviewers/uncollapsed_reviewer_list_spec.js36
-rw-r--r--spec/frontend/sidebar/mock_data.js12
-rw-r--r--spec/frontend/sidebar/reviewer_title_spec.js4
-rw-r--r--spec/frontend/sidebar/reviewers_spec.js64
-rw-r--r--spec/frontend/sidebar/sidebar_mediator_spec.js92
-rw-r--r--spec/frontend/snippets/components/__snapshots__/snippet_blob_edit_spec.js.snap1
-rw-r--r--spec/frontend/surveys/merge_request_performance/app_spec.js9
-rw-r--r--spec/frontend/test_setup.js5
-rw-r--r--spec/frontend/user_popovers_spec.js218
-rw-r--r--spec/frontend/vue_merge_request_widget/components/action_buttons.js (renamed from spec/frontend/vue_mr_widget/components/extensions/actions_spec.js)2
-rw-r--r--spec/frontend/vue_merge_request_widget/components/added_commit_message_spec.js (renamed from spec/frontend/vue_mr_widget/components/added_commit_message_spec.js)5
-rw-r--r--spec/frontend/vue_merge_request_widget/components/approvals/approvals_spec.js (renamed from spec/frontend/vue_mr_widget/components/approvals/approvals_spec.js)0
-rw-r--r--spec/frontend/vue_merge_request_widget/components/approvals/approvals_summary_optional_spec.js (renamed from spec/frontend/vue_mr_widget/components/approvals/approvals_summary_optional_spec.js)0
-rw-r--r--spec/frontend/vue_merge_request_widget/components/approvals/approvals_summary_spec.js (renamed from spec/frontend/vue_mr_widget/components/approvals/approvals_summary_spec.js)0
-rw-r--r--spec/frontend/vue_merge_request_widget/components/approvals/humanized_text_spec.js (renamed from spec/frontend/vue_mr_widget/components/approvals/humanized_text_spec.js)0
-rw-r--r--spec/frontend/vue_merge_request_widget/components/artifacts_list_app_spec.js (renamed from spec/frontend/vue_mr_widget/components/artifacts_list_app_spec.js)0
-rw-r--r--spec/frontend/vue_merge_request_widget/components/artifacts_list_spec.js (renamed from spec/frontend/vue_mr_widget/components/artifacts_list_spec.js)0
-rw-r--r--spec/frontend/vue_merge_request_widget/components/extensions/child_content_spec.js (renamed from spec/frontend/vue_mr_widget/components/extensions/child_content_spec.js)0
-rw-r--r--spec/frontend/vue_merge_request_widget/components/extensions/index_spec.js (renamed from spec/frontend/vue_mr_widget/components/extensions/index_spec.js)0
-rw-r--r--spec/frontend/vue_merge_request_widget/components/extensions/status_icon_spec.js (renamed from spec/frontend/vue_mr_widget/components/extensions/status_icon_spec.js)0
-rw-r--r--spec/frontend/vue_merge_request_widget/components/extensions/utils_spec.js (renamed from spec/frontend/vue_mr_widget/components/extensions/utils_spec.js)0
-rw-r--r--spec/frontend/vue_merge_request_widget/components/mr_collapsible_extension_spec.js (renamed from spec/frontend/vue_mr_widget/components/mr_collapsible_extension_spec.js)0
-rw-r--r--spec/frontend/vue_merge_request_widget/components/mr_widget_alert_message_spec.js (renamed from spec/frontend/vue_mr_widget/components/mr_widget_alert_message_spec.js)0
-rw-r--r--spec/frontend/vue_merge_request_widget/components/mr_widget_author_spec.js (renamed from spec/frontend/vue_mr_widget/components/mr_widget_author_spec.js)0
-rw-r--r--spec/frontend/vue_merge_request_widget/components/mr_widget_author_time_spec.js (renamed from spec/frontend/vue_mr_widget/components/mr_widget_author_time_spec.js)0
-rw-r--r--spec/frontend/vue_merge_request_widget/components/mr_widget_container_spec.js (renamed from spec/frontend/vue_mr_widget/components/mr_widget_container_spec.js)0
-rw-r--r--spec/frontend/vue_merge_request_widget/components/mr_widget_expandable_section_spec.js (renamed from spec/frontend/vue_mr_widget/components/mr_widget_expandable_section_spec.js)0
-rw-r--r--spec/frontend/vue_merge_request_widget/components/mr_widget_icon_spec.js (renamed from spec/frontend/vue_mr_widget/components/mr_widget_icon_spec.js)0
-rw-r--r--spec/frontend/vue_merge_request_widget/components/mr_widget_memory_usage_spec.js (renamed from spec/frontend/vue_mr_widget/components/mr_widget_memory_usage_spec.js)12
-rw-r--r--spec/frontend/vue_merge_request_widget/components/mr_widget_pipeline_container_spec.js (renamed from spec/frontend/vue_mr_widget/components/mr_widget_pipeline_container_spec.js)0
-rw-r--r--spec/frontend/vue_merge_request_widget/components/mr_widget_pipeline_spec.js (renamed from spec/frontend/vue_mr_widget/components/mr_widget_pipeline_spec.js)0
-rw-r--r--spec/frontend/vue_merge_request_widget/components/mr_widget_rebase_spec.js (renamed from spec/frontend/vue_mr_widget/components/mr_widget_rebase_spec.js)61
-rw-r--r--spec/frontend/vue_merge_request_widget/components/mr_widget_related_links_spec.js (renamed from spec/frontend/vue_mr_widget/components/mr_widget_related_links_spec.js)0
-rw-r--r--spec/frontend/vue_merge_request_widget/components/mr_widget_status_icon_spec.js (renamed from spec/frontend/vue_mr_widget/components/mr_widget_status_icon_spec.js)17
-rw-r--r--spec/frontend/vue_merge_request_widget/components/mr_widget_suggest_pipeline_spec.js (renamed from spec/frontend/vue_mr_widget/components/mr_widget_suggest_pipeline_spec.js)0
-rw-r--r--spec/frontend/vue_merge_request_widget/components/pipeline_tour_mock_data.js (renamed from spec/frontend/vue_mr_widget/components/pipeline_tour_mock_data.js)0
-rw-r--r--spec/frontend/vue_merge_request_widget/components/review_app_link_spec.js (renamed from spec/frontend/vue_mr_widget/components/review_app_link_spec.js)0
-rw-r--r--spec/frontend/vue_merge_request_widget/components/states/__snapshots__/mr_widget_auto_merge_enabled_spec.js.snap241
-rw-r--r--spec/frontend/vue_merge_request_widget/components/states/__snapshots__/mr_widget_pipeline_failed_spec.js.snap (renamed from spec/frontend/vue_mr_widget/components/states/__snapshots__/mr_widget_pipeline_failed_spec.js.snap)4
-rw-r--r--spec/frontend/vue_merge_request_widget/components/states/__snapshots__/new_ready_to_merge_spec.js.snap (renamed from spec/frontend/vue_mr_widget/components/states/__snapshots__/new_ready_to_merge_spec.js.snap)0
-rw-r--r--spec/frontend/vue_merge_request_widget/components/states/commit_edit_spec.js (renamed from spec/frontend/vue_mr_widget/components/states/commit_edit_spec.js)0
-rw-r--r--spec/frontend/vue_merge_request_widget/components/states/merge_checks_failed_spec.js (renamed from spec/frontend/vue_mr_widget/components/states/merge_checks_failed_spec.js)0
-rw-r--r--spec/frontend/vue_merge_request_widget/components/states/merge_failed_pipeline_confirmation_dialog_spec.js (renamed from spec/frontend/vue_mr_widget/components/states/merge_failed_pipeline_confirmation_dialog_spec.js)6
-rw-r--r--spec/frontend/vue_merge_request_widget/components/states/mr_widget_archived_spec.js (renamed from spec/frontend/vue_mr_widget/components/states/mr_widget_archived_spec.js)5
-rw-r--r--spec/frontend/vue_merge_request_widget/components/states/mr_widget_auto_merge_enabled_spec.js (renamed from spec/frontend/vue_mr_widget/components/states/mr_widget_auto_merge_enabled_spec.js)116
-rw-r--r--spec/frontend/vue_merge_request_widget/components/states/mr_widget_auto_merge_failed_spec.js (renamed from spec/frontend/vue_mr_widget/components/states/mr_widget_auto_merge_failed_spec.js)6
-rw-r--r--spec/frontend/vue_merge_request_widget/components/states/mr_widget_checking_spec.js (renamed from spec/frontend/vue_mr_widget/components/states/mr_widget_checking_spec.js)4
-rw-r--r--spec/frontend/vue_merge_request_widget/components/states/mr_widget_closed_spec.js (renamed from spec/frontend/vue_mr_widget/components/states/mr_widget_closed_spec.js)24
-rw-r--r--spec/frontend/vue_merge_request_widget/components/states/mr_widget_commit_message_dropdown_spec.js (renamed from spec/frontend/vue_mr_widget/components/states/mr_widget_commit_message_dropdown_spec.js)0
-rw-r--r--spec/frontend/vue_merge_request_widget/components/states/mr_widget_commits_header_spec.js (renamed from spec/frontend/vue_mr_widget/components/states/mr_widget_commits_header_spec.js)7
-rw-r--r--spec/frontend/vue_merge_request_widget/components/states/mr_widget_conflicts_spec.js (renamed from spec/frontend/vue_mr_widget/components/states/mr_widget_conflicts_spec.js)4
-rw-r--r--spec/frontend/vue_merge_request_widget/components/states/mr_widget_failed_to_merge_spec.js (renamed from spec/frontend/vue_mr_widget/components/states/mr_widget_failed_to_merge_spec.js)28
-rw-r--r--spec/frontend/vue_merge_request_widget/components/states/mr_widget_merged_spec.js (renamed from spec/frontend/vue_mr_widget/components/states/mr_widget_merged_spec.js)57
-rw-r--r--spec/frontend/vue_merge_request_widget/components/states/mr_widget_merging_spec.js (renamed from spec/frontend/vue_mr_widget/components/states/mr_widget_merging_spec.js)13
-rw-r--r--spec/frontend/vue_merge_request_widget/components/states/mr_widget_missing_branch_spec.js (renamed from spec/frontend/vue_mr_widget/components/states/mr_widget_missing_branch_spec.js)0
-rw-r--r--spec/frontend/vue_merge_request_widget/components/states/mr_widget_not_allowed_spec.js (renamed from spec/frontend/vue_mr_widget/components/states/mr_widget_not_allowed_spec.js)0
-rw-r--r--spec/frontend/vue_merge_request_widget/components/states/mr_widget_nothing_to_merge_spec.js (renamed from spec/frontend/vue_mr_widget/components/states/mr_widget_nothing_to_merge_spec.js)2
-rw-r--r--spec/frontend/vue_merge_request_widget/components/states/mr_widget_pipeline_blocked_spec.js (renamed from spec/frontend/vue_mr_widget/components/states/mr_widget_pipeline_blocked_spec.js)0
-rw-r--r--spec/frontend/vue_merge_request_widget/components/states/mr_widget_pipeline_failed_spec.js (renamed from spec/frontend/vue_mr_widget/components/states/mr_widget_pipeline_failed_spec.js)7
-rw-r--r--spec/frontend/vue_merge_request_widget/components/states/mr_widget_ready_to_merge_spec.js (renamed from spec/frontend/vue_mr_widget/components/states/mr_widget_ready_to_merge_spec.js)164
-rw-r--r--spec/frontend/vue_merge_request_widget/components/states/mr_widget_sha_mismatch_spec.js (renamed from spec/frontend/vue_mr_widget/components/states/mr_widget_sha_mismatch_spec.js)0
-rw-r--r--spec/frontend/vue_merge_request_widget/components/states/mr_widget_squash_before_merge_spec.js (renamed from spec/frontend/vue_mr_widget/components/states/mr_widget_squash_before_merge_spec.js)0
-rw-r--r--spec/frontend/vue_merge_request_widget/components/states/mr_widget_unresolved_discussions_spec.js (renamed from spec/frontend/vue_mr_widget/components/states/mr_widget_unresolved_discussions_spec.js)0
-rw-r--r--spec/frontend/vue_merge_request_widget/components/states/mr_widget_wip_spec.js (renamed from spec/frontend/vue_mr_widget/components/states/mr_widget_wip_spec.js)16
-rw-r--r--spec/frontend/vue_merge_request_widget/components/states/new_ready_to_merge_spec.js (renamed from spec/frontend/vue_mr_widget/components/states/new_ready_to_merge_spec.js)0
-rw-r--r--spec/frontend/vue_merge_request_widget/components/terraform/mock_data.js (renamed from spec/frontend/vue_mr_widget/components/terraform/mock_data.js)0
-rw-r--r--spec/frontend/vue_merge_request_widget/components/terraform/mr_widget_terraform_container_spec.js (renamed from spec/frontend/vue_mr_widget/components/terraform/mr_widget_terraform_container_spec.js)0
-rw-r--r--spec/frontend/vue_merge_request_widget/components/terraform/terraform_plan_spec.js (renamed from spec/frontend/vue_mr_widget/components/terraform/terraform_plan_spec.js)0
-rw-r--r--spec/frontend/vue_merge_request_widget/components/widget/app_spec.js19
-rw-r--r--spec/frontend/vue_merge_request_widget/components/widget/widget_spec.js167
-rw-r--r--spec/frontend/vue_merge_request_widget/deployment/deployment_action_button_spec.js (renamed from spec/frontend/vue_mr_widget/deployment/deployment_action_button_spec.js)0
-rw-r--r--spec/frontend/vue_merge_request_widget/deployment/deployment_actions_spec.js (renamed from spec/frontend/vue_mr_widget/deployment/deployment_actions_spec.js)0
-rw-r--r--spec/frontend/vue_merge_request_widget/deployment/deployment_list_spec.js (renamed from spec/frontend/vue_mr_widget/deployment/deployment_list_spec.js)0
-rw-r--r--spec/frontend/vue_merge_request_widget/deployment/deployment_mock_data.js (renamed from spec/frontend/vue_mr_widget/deployment/deployment_mock_data.js)0
-rw-r--r--spec/frontend/vue_merge_request_widget/deployment/deployment_spec.js (renamed from spec/frontend/vue_mr_widget/deployment/deployment_spec.js)0
-rw-r--r--spec/frontend/vue_merge_request_widget/deployment/deployment_view_button_spec.js (renamed from spec/frontend/vue_mr_widget/deployment/deployment_view_button_spec.js)0
-rw-r--r--spec/frontend/vue_merge_request_widget/extensions/test_report/index_spec.js (renamed from spec/frontend/vue_mr_widget/extensions/test_report/index_spec.js)0
-rw-r--r--spec/frontend/vue_merge_request_widget/extensions/test_report/utils_spec.js (renamed from spec/frontend/vue_mr_widget/extensions/test_report/utils_spec.js)0
-rw-r--r--spec/frontend/vue_merge_request_widget/extentions/accessibility/index_spec.js (renamed from spec/frontend/vue_mr_widget/extentions/accessibility/index_spec.js)0
-rw-r--r--spec/frontend/vue_merge_request_widget/extentions/accessibility/mock_data.js (renamed from spec/frontend/vue_mr_widget/extentions/accessibility/mock_data.js)0
-rw-r--r--spec/frontend/vue_merge_request_widget/extentions/code_quality/index_spec.js (renamed from spec/frontend/vue_mr_widget/extentions/code_quality/index_spec.js)0
-rw-r--r--spec/frontend/vue_merge_request_widget/extentions/code_quality/mock_data.js (renamed from spec/frontend/vue_mr_widget/extentions/code_quality/mock_data.js)0
-rw-r--r--spec/frontend/vue_merge_request_widget/extentions/terraform/index_spec.js (renamed from spec/frontend/vue_mr_widget/extentions/terraform/index_spec.js)0
-rw-r--r--spec/frontend/vue_merge_request_widget/mock_data.js (renamed from spec/frontend/vue_mr_widget/mock_data.js)0
-rw-r--r--spec/frontend/vue_merge_request_widget/mr_widget_how_to_merge_modal_spec.js (renamed from spec/frontend/vue_mr_widget/mr_widget_how_to_merge_modal_spec.js)0
-rw-r--r--spec/frontend/vue_merge_request_widget/mr_widget_options_spec.js (renamed from spec/frontend/vue_mr_widget/mr_widget_options_spec.js)82
-rw-r--r--spec/frontend/vue_merge_request_widget/stores/artifacts_list/actions_spec.js (renamed from spec/frontend/vue_mr_widget/stores/artifacts_list/actions_spec.js)0
-rw-r--r--spec/frontend/vue_merge_request_widget/stores/artifacts_list/getters_spec.js (renamed from spec/frontend/vue_mr_widget/stores/artifacts_list/getters_spec.js)0
-rw-r--r--spec/frontend/vue_merge_request_widget/stores/artifacts_list/mutations_spec.js (renamed from spec/frontend/vue_mr_widget/stores/artifacts_list/mutations_spec.js)0
-rw-r--r--spec/frontend/vue_merge_request_widget/stores/get_state_key_spec.js (renamed from spec/frontend/vue_mr_widget/stores/get_state_key_spec.js)22
-rw-r--r--spec/frontend/vue_merge_request_widget/stores/mr_widget_store_spec.js (renamed from spec/frontend/vue_mr_widget/stores/mr_widget_store_spec.js)0
-rw-r--r--spec/frontend/vue_merge_request_widget/test_extensions.js (renamed from spec/frontend/vue_mr_widget/test_extensions.js)0
-rw-r--r--spec/frontend/vue_mr_widget/components/states/__snapshots__/mr_widget_auto_merge_enabled_spec.js.snap145
-rw-r--r--spec/frontend/vue_shared/alert_details/alert_details_spec.js6
-rw-r--r--spec/frontend/vue_shared/alert_details/alert_metrics_spec.js4
-rw-r--r--spec/frontend/vue_shared/alert_details/alert_status_spec.js2
-rw-r--r--spec/frontend/vue_shared/alert_details/sidebar/alert_sidebar_assignees_spec.js4
-rw-r--r--spec/frontend/vue_shared/alert_details/sidebar/alert_sidebar_spec.js4
-rw-r--r--spec/frontend/vue_shared/alert_details/system_notes/alert_management_system_note_spec.js2
-rw-r--r--spec/frontend/vue_shared/components/actions_button_spec.js4
-rw-r--r--spec/frontend/vue_shared/components/alert_details_table_spec.js4
-rw-r--r--spec/frontend/vue_shared/components/blob_viewers/rich_viewer_spec.js2
-rw-r--r--spec/frontend/vue_shared/components/changed_file_icon_spec.js6
-rw-r--r--spec/frontend/vue_shared/components/ci_icon_spec.js2
-rw-r--r--spec/frontend/vue_shared/components/clipboard_button_spec.js2
-rw-r--r--spec/frontend/vue_shared/components/clone_dropdown_spec.js12
-rw-r--r--spec/frontend/vue_shared/components/color_picker/color_picker_spec.js10
-rw-r--r--spec/frontend/vue_shared/components/commit_spec.js6
-rw-r--r--spec/frontend/vue_shared/components/confirm_modal_spec.js6
-rw-r--r--spec/frontend/vue_shared/components/dismissible_alert_spec.js4
-rw-r--r--spec/frontend/vue_shared/components/dismissible_container_spec.js2
-rw-r--r--spec/frontend/vue_shared/components/dropdown/dropdown_button_spec.js77
-rw-r--r--spec/frontend/vue_shared/components/dropdown/dropdown_widget_spec.js2
-rw-r--r--spec/frontend/vue_shared/components/expand_button_spec.js22
-rw-r--r--spec/frontend/vue_shared/components/file_icon_spec.js4
-rw-r--r--spec/frontend/vue_shared/components/file_row_spec.js4
-rw-r--r--spec/frontend/vue_shared/components/file_tree_spec.js4
-rw-r--r--spec/frontend/vue_shared/components/filtered_search_bar/filtered_search_bar_root_spec.js44
-rw-r--r--spec/frontend/vue_shared/components/filtered_search_bar/mock_data.js8
-rw-r--r--spec/frontend/vue_shared/components/filtered_search_bar/tokens/author_token_spec.js12
-rw-r--r--spec/frontend/vue_shared/components/filtered_search_bar/tokens/branch_token_spec.js14
-rw-r--r--spec/frontend/vue_shared/components/filtered_search_bar/tokens/crm_contact_token_spec.js18
-rw-r--r--spec/frontend/vue_shared/components/filtered_search_bar/tokens/crm_organization_token_spec.js18
-rw-r--r--spec/frontend/vue_shared/components/filtered_search_bar/tokens/emoji_token_spec.js22
-rw-r--r--spec/frontend/vue_shared/components/filtered_search_bar/tokens/label_token_spec.js18
-rw-r--r--spec/frontend/vue_shared/components/filtered_search_bar/tokens/milestone_token_spec.js18
-rw-r--r--spec/frontend/vue_shared/components/gitlab_version_check_spec.js31
-rw-r--r--spec/frontend/vue_shared/components/gl_modal_vuex_spec.js10
-rw-r--r--spec/frontend/vue_shared/components/help_popover_spec.js4
-rw-r--r--spec/frontend/vue_shared/components/integration_help_text_spec.js12
-rw-r--r--spec/frontend/vue_shared/components/markdown/field_spec.js7
-rw-r--r--spec/frontend/vue_shared/components/markdown/header_spec.js17
-rw-r--r--spec/frontend/vue_shared/components/markdown/suggestion_diff_header_spec.js4
-rw-r--r--spec/frontend/vue_shared/components/markdown/suggestion_diff_spec.js6
-rw-r--r--spec/frontend/vue_shared/components/markdown/toolbar_button_spec.js2
-rw-r--r--spec/frontend/vue_shared/components/memory_graph_spec.js2
-rw-r--r--spec/frontend/vue_shared/components/metric_images/metric_images_tab_spec.js2
-rw-r--r--spec/frontend/vue_shared/components/namespace_select/namespace_select_spec.js81
-rw-r--r--spec/frontend/vue_shared/components/navigation_tabs_spec.js2
-rw-r--r--spec/frontend/vue_shared/components/notes/noteable_warning_spec.js13
-rw-r--r--spec/frontend/vue_shared/components/notes/placeholder_note_spec.js2
-rw-r--r--spec/frontend/vue_shared/components/paginated_table_with_search_and_tabs/paginated_table_with_search_and_tabs_spec.js18
-rw-r--r--spec/frontend/vue_shared/components/pagination_bar/pagination_bar_spec.js2
-rw-r--r--spec/frontend/vue_shared/components/pagination_links_spec.js2
-rw-r--r--spec/frontend/vue_shared/components/project_avatar_spec.js36
-rw-r--r--spec/frontend/vue_shared/components/project_selector/project_list_item_spec.js1
-rw-r--r--spec/frontend/vue_shared/components/project_selector/project_selector_spec.js6
-rw-r--r--spec/frontend/vue_shared/components/registry/code_instruction_spec.js2
-rw-r--r--spec/frontend/vue_shared/components/registry/details_row_spec.js2
-rw-r--r--spec/frontend/vue_shared/components/registry/history_item_spec.js4
-rw-r--r--spec/frontend/vue_shared/components/registry/list_item_spec.js2
-rw-r--r--spec/frontend/vue_shared/components/registry/metadata_item_spec.js6
-rw-r--r--spec/frontend/vue_shared/components/registry/registry_search_spec.js6
-rw-r--r--spec/frontend/vue_shared/components/registry/title_area_spec.js2
-rw-r--r--spec/frontend/vue_shared/components/rich_timestamp_tooltip_spec.js41
-rw-r--r--spec/frontend/vue_shared/components/runner_instructions/runner_instructions_modal_spec.js6
-rw-r--r--spec/frontend/vue_shared/components/security_reports/artifact_downloads/merge_request_artifact_download_spec.js2
-rw-r--r--spec/frontend/vue_shared/components/security_reports/help_icon_spec.js6
-rw-r--r--spec/frontend/vue_shared/components/sidebar/issuable_move_dropdown_spec.js32
-rw-r--r--spec/frontend/vue_shared/components/sidebar/labels_select_vue/dropdown_button_spec.js6
-rw-r--r--spec/frontend/vue_shared/components/sidebar/labels_select_vue/dropdown_contents_create_view_spec.js16
-rw-r--r--spec/frontend/vue_shared/components/sidebar/labels_select_vue/dropdown_contents_labels_view_spec.js16
-rw-r--r--spec/frontend/vue_shared/components/sidebar/labels_select_vue/dropdown_title_spec.js4
-rw-r--r--spec/frontend/vue_shared/components/sidebar/labels_select_vue/label_item_spec.js2
-rw-r--r--spec/frontend/vue_shared/components/sidebar/labels_select_vue/labels_select_root_spec.js14
-rw-r--r--spec/frontend/vue_shared/components/sidebar/labels_select_widget/labels_select_root_spec.js1
-rw-r--r--spec/frontend/vue_shared/components/sidebar/todo_button_spec.js12
-rw-r--r--spec/frontend/vue_shared/components/source_editor_spec.js26
-rw-r--r--spec/frontend/vue_shared/components/source_viewer/components/chunk_line_spec.js29
-rw-r--r--spec/frontend/vue_shared/components/source_viewer/components/chunk_spec.js2
-rw-r--r--spec/frontend/vue_shared/components/source_viewer/plugins/link_dependencies_spec.js9
-rw-r--r--spec/frontend/vue_shared/components/source_viewer/plugins/mock_data.js2
-rw-r--r--spec/frontend/vue_shared/components/source_viewer/plugins/utils/dependency_linker_util_spec.js5
-rw-r--r--spec/frontend/vue_shared/components/source_viewer/plugins/utils/gemspec_linker_spec.js14
-rw-r--r--spec/frontend/vue_shared/components/source_viewer/source_viewer_spec.js3
-rw-r--r--spec/frontend/vue_shared/components/split_button_spec.js5
-rw-r--r--spec/frontend/vue_shared/components/table_pagination_spec.js6
-rw-r--r--spec/frontend/vue_shared/components/tooltip_on_truncate_spec.js2
-rw-r--r--spec/frontend/vue_shared/components/upload_dropzone/upload_dropzone_spec.js2
-rw-r--r--spec/frontend/vue_shared/components/user_access_role_badge_spec.js2
-rw-r--r--spec/frontend/vue_shared/components/user_avatar/user_avatar_image_new_spec.js45
-rw-r--r--spec/frontend/vue_shared/components/user_avatar/user_avatar_image_spec.js64
-rw-r--r--spec/frontend/vue_shared/components/user_avatar/user_avatar_link_new_spec.js1
-rw-r--r--spec/frontend/vue_shared/components/user_avatar/user_avatar_link_old_spec.js1
-rw-r--r--spec/frontend/vue_shared/components/user_avatar/user_avatar_link_spec.js64
-rw-r--r--spec/frontend/vue_shared/components/user_avatar/user_avatar_list_spec.js14
-rw-r--r--spec/frontend/vue_shared/components/user_popover/user_popover_spec.js39
-rw-r--r--spec/frontend/vue_shared/components/user_select_spec.js95
-rw-r--r--spec/frontend/vue_shared/components/web_ide_link_spec.js10
-rw-r--r--spec/frontend/vue_shared/issuable/create/components/issuable_create_root_spec.js4
-rw-r--r--spec/frontend/vue_shared/issuable/create/components/issuable_form_spec.js14
-rw-r--r--spec/frontend/vue_shared/issuable/list/components/issuable_item_spec.js48
-rw-r--r--spec/frontend/vue_shared/issuable/list/components/issuable_list_root_spec.js12
-rw-r--r--spec/frontend/vue_shared/issuable/list/mock_data.js1
-rw-r--r--spec/frontend/vue_shared/issuable/show/components/issuable_body_spec.js10
-rw-r--r--spec/frontend/vue_shared/issuable/show/components/issuable_edit_form_spec.js6
-rw-r--r--spec/frontend/vue_shared/issuable/show/components/issuable_header_spec.js12
-rw-r--r--spec/frontend/vue_shared/issuable/show/components/issuable_show_root_spec.js22
-rw-r--r--spec/frontend/vue_shared/security_reports/components/security_report_download_dropdown_spec.js4
-rw-r--r--spec/frontend/vue_shared/security_reports/security_reports_app_spec.js4
-rw-r--r--spec/frontend/whats_new/components/app_spec.js4
-rw-r--r--spec/frontend/whats_new/components/feature_spec.js9
-rw-r--r--spec/frontend/work_items/components/item_state_spec.js16
-rw-r--r--spec/frontend/work_items/components/item_title_spec.js2
-rw-r--r--spec/frontend/work_items/components/work_item_actions_spec.js85
-rw-r--r--spec/frontend/work_items/components/work_item_assignees_spec.js120
-rw-r--r--spec/frontend/work_items/components/work_item_detail_modal_spec.js134
-rw-r--r--spec/frontend/work_items/components/work_item_links/work_item_links_form_spec.js87
-rw-r--r--spec/frontend/work_items/components/work_item_links/work_item_links_menu_spec.js119
-rw-r--r--spec/frontend/work_items/components/work_item_links/work_item_links_spec.js205
-rw-r--r--spec/frontend/work_items/components/work_item_state_spec.js16
-rw-r--r--spec/frontend/work_items/components/work_item_title_spec.js21
-rw-r--r--spec/frontend/work_items/components/work_item_type_icon_spec.js47
-rw-r--r--spec/frontend/work_items/components/work_item_weight_spec.js87
-rw-r--r--spec/frontend/work_items/mock_data.js218
-rw-r--r--spec/frontend/work_items/pages/work_item_detail_spec.js213
-rw-r--r--spec/frontend/work_items_hierarchy/components/app_spec.js8
-rw-r--r--spec/frontend/work_items_hierarchy/components/hierarchy_spec.js7
-rw-r--r--spec/frontend_integration/content_editor/content_editor_integration_spec.js68
-rw-r--r--spec/frontend_integration/fly_out_nav_browser_spec.js10
-rw-r--r--spec/frontend_integration/ide/helpers/start.js5
-rw-r--r--spec/frontend_integration/ide/ide_integration_spec.js4
-rw-r--r--spec/frontend_integration/snippets/snippets_notes_spec.js5
-rw-r--r--spec/frontend_integration/test_helpers/setup/setup_globals.js4
-rw-r--r--spec/graphql/features/feature_flag_spec.rb2
-rw-r--r--spec/graphql/graphql_triggers_spec.rb14
-rw-r--r--spec/graphql/mutations/ci/runner/bulk_delete_spec.rb91
-rw-r--r--spec/graphql/mutations/ci/runner/update_spec.rb3
-rw-r--r--spec/graphql/mutations/incident_management/timeline_event/update_spec.rb39
-rw-r--r--spec/graphql/mutations/merge_requests/set_labels_spec.rb2
-rw-r--r--spec/graphql/mutations/merge_requests/set_reviewers_spec.rb140
-rw-r--r--spec/graphql/mutations/releases/create_spec.rb6
-rw-r--r--spec/graphql/mutations/releases/delete_spec.rb2
-rw-r--r--spec/graphql/mutations/releases/update_spec.rb4
-rw-r--r--spec/graphql/resolvers/ci/runner_jobs_resolver_spec.rb2
-rw-r--r--spec/graphql/resolvers/crm/contact_state_counts_resolver_spec.rb63
-rw-r--r--spec/graphql/resolvers/crm/contacts_resolver_spec.rb21
-rw-r--r--spec/graphql/resolvers/group_members/notification_email_resolver_spec.rb2
-rw-r--r--spec/graphql/resolvers/project_jobs_resolver_spec.rb2
-rw-r--r--spec/graphql/resolvers/projects/fork_targets_resolver_spec.rb49
-rw-r--r--spec/graphql/resolvers/projects/grafana_integration_resolver_spec.rb4
-rw-r--r--spec/graphql/resolvers/projects_resolver_spec.rb2
-rw-r--r--spec/graphql/types/base_field_spec.rb10
-rw-r--r--spec/graphql/types/ci/group_variable_type_spec.rb9
-rw-r--r--spec/graphql/types/ci/instance_variable_type_spec.rb9
-rw-r--r--spec/graphql/types/ci/job_token_scope_type_spec.rb2
-rw-r--r--spec/graphql/types/ci/manual_variable_type_spec.rb7
-rw-r--r--spec/graphql/types/ci/project_variable_type_spec.rb9
-rw-r--r--spec/graphql/types/ci/runner_upgrade_status_enum_spec.rb (renamed from spec/graphql/types/ci/runner_upgrade_status_type_enum_spec.rb)4
-rw-r--r--spec/graphql/types/ci/variable_input_type_spec.rb11
-rw-r--r--spec/graphql/types/ci/variable_interface_spec.rb (renamed from spec/graphql/types/ci/variable_type_spec.rb)6
-rw-r--r--spec/graphql/types/customer_relations/contact_sort_enum_spec.rb28
-rw-r--r--spec/graphql/types/customer_relations/contact_state_counts_type_spec.rb17
-rw-r--r--spec/graphql/types/global_id_type_spec.rb6
-rw-r--r--spec/graphql/types/group_type_spec.rb51
-rw-r--r--spec/graphql/types/issue_type_spec.rb6
-rw-r--r--spec/graphql/types/namespace_type_spec.rb1
-rw-r--r--spec/graphql/types/notes/note_type_spec.rb1
-rw-r--r--spec/graphql/types/project_type_spec.rb6
-rw-r--r--spec/graphql/types/projects/service_type_enum_spec.rb1
-rw-r--r--spec/graphql/types/subscription_type_spec.rb1
-rw-r--r--spec/graphql/types/time_tracking/timelog_category_type_spec.rb22
-rw-r--r--spec/graphql/types/upload_type_spec.rb13
-rw-r--r--spec/graphql/types/user_type_spec.rb2
-rw-r--r--spec/graphql/types/work_item_type_spec.rb5
-rw-r--r--spec/graphql/types/work_items/widget_interface_spec.rb1
-rw-r--r--spec/graphql/types/work_items/widgets/assignees_input_type_spec.rb9
-rw-r--r--spec/graphql/types/work_items/widgets/labels_type_spec.rb11
-rw-r--r--spec/graphql/types/work_items/widgets/start_and_due_date_type_spec.rb11
-rw-r--r--spec/graphql/types/work_items/widgets/start_and_due_date_update_input_type_spec.rb9
-rw-r--r--spec/helpers/admin/identities_helper_spec.rb58
-rw-r--r--spec/helpers/application_helper_spec.rb8
-rw-r--r--spec/helpers/boards_helper_spec.rb3
-rw-r--r--spec/helpers/ci/pipeline_editor_helper_spec.rb8
-rw-r--r--spec/helpers/ci/runners_helper_spec.rb4
-rw-r--r--spec/helpers/commits_helper_spec.rb18
-rw-r--r--spec/helpers/form_helper_spec.rb81
-rw-r--r--spec/helpers/gitlab_script_tag_helper_spec.rb10
-rw-r--r--spec/helpers/groups/group_members_helper_spec.rb2
-rw-r--r--spec/helpers/groups_helper_spec.rb75
-rw-r--r--spec/helpers/issuables_description_templates_helper_spec.rb115
-rw-r--r--spec/helpers/issuables_helper_spec.rb48
-rw-r--r--spec/helpers/members_helper_spec.rb33
-rw-r--r--spec/helpers/merge_requests_helper_spec.rb69
-rw-r--r--spec/helpers/namespaces_helper_spec.rb33
-rw-r--r--spec/helpers/nav/new_dropdown_helper_spec.rb2
-rw-r--r--spec/helpers/nav/top_nav_helper_spec.rb12
-rw-r--r--spec/helpers/profiles_helper_spec.rb32
-rw-r--r--spec/helpers/projects/pipeline_helper_spec.rb8
-rw-r--r--spec/helpers/projects_helper_spec.rb39
-rw-r--r--spec/helpers/search_helper_spec.rb13
-rw-r--r--spec/helpers/storage_helper_spec.rb24
-rw-r--r--spec/helpers/users_helper_spec.rb19
-rw-r--r--spec/initializers/00_deprecations_spec.rb20
-rw-r--r--spec/initializers/0_log_deprecations_spec.rb78
-rw-r--r--spec/initializers/diagnostic_reports_spec.rb65
-rw-r--r--spec/initializers/global_id_spec.rb2
-rw-r--r--spec/initializers/memory_watchdog_spec.rb116
-rw-r--r--spec/lib/api/ci/helpers/runner_helpers_spec.rb4
-rw-r--r--spec/lib/api/entities/bulk_imports/entity_spec.rb1
-rw-r--r--spec/lib/api/entities/ci/job_request/image_spec.rb2
-rw-r--r--spec/lib/api/entities/ci/job_request/port_spec.rb2
-rw-r--r--spec/lib/api/entities/ci/job_request/service_spec.rb2
-rw-r--r--spec/lib/api/entities/project_spec.rb13
-rw-r--r--spec/lib/api/helpers/authentication_spec.rb2
-rw-r--r--spec/lib/api/helpers_spec.rb2
-rw-r--r--spec/lib/api/support/git_access_actor_spec.rb30
-rw-r--r--spec/lib/backup/database_spec.rb24
-rw-r--r--spec/lib/backup/gitaly_backup_spec.rb70
-rw-r--r--spec/lib/backup/manager_spec.rb24
-rw-r--r--spec/lib/banzai/cross_project_reference_spec.rb2
-rw-r--r--spec/lib/banzai/filter/broadcast_message_placeholders_filter_spec.rb6
-rw-r--r--spec/lib/banzai/filter/commit_trailers_filter_spec.rb2
-rw-r--r--spec/lib/banzai/filter/task_list_filter_spec.rb34
-rw-r--r--spec/lib/banzai/pipeline/incident_management/timeline_event_pipeline_spec.rb29
-rw-r--r--spec/lib/banzai/pipeline/plain_markdown_pipeline_spec.rb4
-rw-r--r--spec/lib/banzai/renderer_spec.rb2
-rw-r--r--spec/lib/bitbucket_server/connection_spec.rb2
-rw-r--r--spec/lib/bulk_imports/clients/http_spec.rb66
-rw-r--r--spec/lib/bulk_imports/common/pipelines/lfs_objects_pipeline_spec.rb4
-rw-r--r--spec/lib/bulk_imports/common/pipelines/uploads_pipeline_spec.rb2
-rw-r--r--spec/lib/bulk_imports/groups/transformers/group_attributes_transformer_spec.rb8
-rw-r--r--spec/lib/bulk_imports/projects/pipelines/releases_pipeline_spec.rb8
-rw-r--r--spec/lib/bulk_imports/projects/pipelines/snippets_repository_pipeline_spec.rb2
-rw-r--r--spec/lib/bulk_imports/projects/transformers/project_attributes_transformer_spec.rb10
-rw-r--r--spec/lib/container_registry/gitlab_api_client_spec.rb125
-rw-r--r--spec/lib/container_registry/tag_spec.rb35
-rw-r--r--spec/lib/feature_spec.rb4
-rw-r--r--spec/lib/gitlab/alert_management/payload/base_spec.rb40
-rw-r--r--spec/lib/gitlab/application_context_spec.rb2
-rw-r--r--spec/lib/gitlab/application_rate_limiter_spec.rb71
-rw-r--r--spec/lib/gitlab/asciidoc_spec.rb2
-rw-r--r--spec/lib/gitlab/audit/auditor_spec.rb258
-rw-r--r--spec/lib/gitlab/audit/ci_runner_token_author_spec.rb2
-rw-r--r--spec/lib/gitlab/audit/deploy_key_author_spec.rb17
-rw-r--r--spec/lib/gitlab/audit/null_author_spec.rb9
-rw-r--r--spec/lib/gitlab/audit/null_target_spec.rb25
-rw-r--r--spec/lib/gitlab/audit/target_spec.rb47
-rw-r--r--spec/lib/gitlab/auth/auth_finders_spec.rb6
-rw-r--r--spec/lib/gitlab/auth/ip_rate_limiter_spec.rb22
-rw-r--r--spec/lib/gitlab/auth/o_auth/auth_hash_spec.rb30
-rw-r--r--spec/lib/gitlab/auth/o_auth/user_spec.rb1
-rw-r--r--spec/lib/gitlab/auth_spec.rb71
-rw-r--r--spec/lib/gitlab/background_migration/backfill_ci_namespace_mirrors_spec.rb45
-rw-r--r--spec/lib/gitlab/background_migration/backfill_ci_project_mirrors_spec.rb46
-rw-r--r--spec/lib/gitlab/background_migration/backfill_ci_queuing_tables_spec.rb3
-rw-r--r--spec/lib/gitlab/background_migration/backfill_ci_runner_semver_spec.rb54
-rw-r--r--spec/lib/gitlab/background_migration/backfill_group_features_spec.rb3
-rw-r--r--spec/lib/gitlab/background_migration/backfill_namespace_id_of_vulnerability_reads_spec.rb54
-rw-r--r--spec/lib/gitlab/background_migration/backfill_project_import_level_spec.rb123
-rw-r--r--spec/lib/gitlab/background_migration/backfill_projects_with_coverage_spec.rb3
-rw-r--r--spec/lib/gitlab/background_migration/backfill_snippet_repositories_spec.rb2
-rw-r--r--spec/lib/gitlab/background_migration/backfill_vulnerability_reads_cluster_agent_spec.rb93
-rw-r--r--spec/lib/gitlab/background_migration/batched_migration_job_spec.rb151
-rw-r--r--spec/lib/gitlab/background_migration/batching_strategies/primary_key_batching_strategy_spec.rb15
-rw-r--r--spec/lib/gitlab/background_migration/copy_ci_builds_columns_to_security_scans_spec.rb51
-rw-r--r--spec/lib/gitlab/background_migration/copy_column_using_background_migration_job_spec.rb58
-rw-r--r--spec/lib/gitlab/background_migration/disable_legacy_open_source_license_for_no_issues_no_repo_projects_spec.rb62
-rw-r--r--spec/lib/gitlab/background_migration/disable_legacy_open_source_license_for_one_member_no_repo_projects_spec.rb66
-rw-r--r--spec/lib/gitlab/background_migration/drop_invalid_security_findings_spec.rb3
-rw-r--r--spec/lib/gitlab/background_migration/extract_project_topics_into_separate_table_spec.rb3
-rw-r--r--spec/lib/gitlab/background_migration/migrate_project_taggings_context_from_tags_to_topics_spec.rb3
-rw-r--r--spec/lib/gitlab/background_migration/nullify_orphan_runner_id_on_ci_builds_spec.rb13
-rw-r--r--spec/lib/gitlab/background_migration/project_namespaces/backfill_project_namespaces_spec.rb298
-rw-r--r--spec/lib/gitlab/background_migration/recalculate_vulnerabilities_occurrences_uuid_spec.rb2
-rw-r--r--spec/lib/gitlab/background_migration/remove_all_trace_expiration_dates_spec.rb3
-rw-r--r--spec/lib/gitlab/background_migration/remove_occurrence_pipelines_and_duplicate_vulnerabilities_findings_spec.rb3
-rw-r--r--spec/lib/gitlab/background_migration/set_legacy_open_source_license_available_for_non_public_projects_spec.rb51
-rw-r--r--spec/lib/gitlab/background_migration/update_jira_tracker_data_deployment_type_based_on_url_spec.rb41
-rw-r--r--spec/lib/gitlab/background_task_spec.rb209
-rw-r--r--spec/lib/gitlab/bare_repository_import/repository_spec.rb19
-rw-r--r--spec/lib/gitlab/batch_pop_queueing_spec.rb2
-rw-r--r--spec/lib/gitlab/chat_name_token_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/ansi2html_spec.rb8
-rw-r--r--spec/lib/gitlab/ci/ansi2json_spec.rb14
-rw-r--r--spec/lib/gitlab/ci/artifacts/logger_spec.rb60
-rw-r--r--spec/lib/gitlab/ci/artifacts/metrics_spec.rb19
-rw-r--r--spec/lib/gitlab/ci/build/artifacts/adapters/zip_stream_spec.rb86
-rw-r--r--spec/lib/gitlab/ci/build/artifacts/metadata/entry_spec.rb3
-rw-r--r--spec/lib/gitlab/ci/build/prerequisite/kubernetes_namespace_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/build/releaser_spec.rb4
-rw-r--r--spec/lib/gitlab/ci/build/rules/rule/clause/changes_spec.rb98
-rw-r--r--spec/lib/gitlab/ci/build/rules/rule/clause/if_spec.rb8
-rw-r--r--spec/lib/gitlab/ci/config/entry/image_spec.rb6
-rw-r--r--spec/lib/gitlab/ci/config/entry/imageable_spec.rb81
-rw-r--r--spec/lib/gitlab/ci/config/entry/processable_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/config/entry/release_spec.rb38
-rw-r--r--spec/lib/gitlab/ci/config/entry/reports_spec.rb1
-rw-r--r--spec/lib/gitlab/ci/config/entry/root_spec.rb4
-rw-r--r--spec/lib/gitlab/ci/config/entry/rules/rule/changes_spec.rb27
-rw-r--r--spec/lib/gitlab/ci/config/entry/rules/rule_spec.rb7
-rw-r--r--spec/lib/gitlab/ci/config/entry/service_spec.rb6
-rw-r--r--spec/lib/gitlab/ci/config/entry/tags_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/config/external/file/base_spec.rb4
-rw-r--r--spec/lib/gitlab/ci/config/external/file/local_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/config/external/file/remote_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/config/normalizer_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/config_spec.rb17
-rw-r--r--spec/lib/gitlab/ci/cron_parser_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/parsers/sbom/cyclonedx_properties_spec.rb88
-rw-r--r--spec/lib/gitlab/ci/parsers/sbom/cyclonedx_spec.rb135
-rw-r--r--spec/lib/gitlab/ci/parsers/sbom/source/dependency_scanning_spec.rb40
-rw-r--r--spec/lib/gitlab/ci/parsers/sbom/validators/cyclonedx_schema_validator_spec.rb132
-rw-r--r--spec/lib/gitlab/ci/parsers/security/validators/schema_validator_spec.rb599
-rw-r--r--spec/lib/gitlab/ci/pipeline/chain/command_spec.rb4
-rw-r--r--spec/lib/gitlab/ci/pipeline/chain/create_deployments_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/pipeline/chain/evaluate_workflow_rules_spec.rb4
-rw-r--r--spec/lib/gitlab/ci/pipeline/chain/seed_block_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/pipeline/chain/seed_spec.rb26
-rw-r--r--spec/lib/gitlab/ci/pipeline/chain/validate/external_spec.rb21
-rw-r--r--spec/lib/gitlab/ci/pipeline/expression/lexeme/matches_spec.rb8
-rw-r--r--spec/lib/gitlab/ci/pipeline/expression/lexeme/not_matches_spec.rb8
-rw-r--r--spec/lib/gitlab/ci/pipeline/expression/statement_spec.rb18
-rw-r--r--spec/lib/gitlab/ci/pipeline/quota/deployments_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/pipeline/seed/build_spec.rb90
-rw-r--r--spec/lib/gitlab/ci/reports/sbom/component_spec.rb23
-rw-r--r--spec/lib/gitlab/ci/reports/sbom/report_spec.rb54
-rw-r--r--spec/lib/gitlab/ci/reports/sbom/reports_spec.rb21
-rw-r--r--spec/lib/gitlab/ci/reports/sbom/source_spec.rb29
-rw-r--r--spec/lib/gitlab/ci/reports/security/reports_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/reports/security/vulnerability_reports_comparer_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/reports/test_suite_spec.rb4
-rw-r--r--spec/lib/gitlab/ci/runner_releases_spec.rb12
-rw-r--r--spec/lib/gitlab/ci/runner_upgrade_check_spec.rb45
-rw-r--r--spec/lib/gitlab/ci/status/bridge/common_spec.rb10
-rw-r--r--spec/lib/gitlab/ci/status/build/canceled_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/status/build/created_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/status/build/manual_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/status/build/pending_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/status/build/skipped_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/status/processable/waiting_for_resource_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/templates/Jobs/sast_iac_latest_gitlab_ci_yaml_spec.rb5
-rw-r--r--spec/lib/gitlab/ci/templates/auto_devops_gitlab_ci_yaml_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/trace/remote_checksum_spec.rb4
-rw-r--r--spec/lib/gitlab/ci/variables/builder_spec.rb3
-rw-r--r--spec/lib/gitlab/ci/variables/collection_spec.rb32
-rw-r--r--spec/lib/gitlab/ci/variables/helpers_spec.rb113
-rw-r--r--spec/lib/gitlab/ci/yaml_processor/result_spec.rb4
-rw-r--r--spec/lib/gitlab/ci/yaml_processor_spec.rb72
-rw-r--r--spec/lib/gitlab/composer/cache_spec.rb2
-rw-r--r--spec/lib/gitlab/cycle_analytics/stage_summary_spec.rb2
-rw-r--r--spec/lib/gitlab/data_builder/build_spec.rb4
-rw-r--r--spec/lib/gitlab/data_builder/issuable_spec.rb21
-rw-r--r--spec/lib/gitlab/data_builder/pipeline_spec.rb2
-rw-r--r--spec/lib/gitlab/data_builder/push_spec.rb1
-rw-r--r--spec/lib/gitlab/database/async_indexes/index_destructor_spec.rb69
-rw-r--r--spec/lib/gitlab/database/async_indexes/migration_helpers_spec.rb38
-rw-r--r--spec/lib/gitlab/database/async_indexes/postgres_async_index_spec.rb17
-rw-r--r--spec/lib/gitlab/database/async_indexes_spec.rb20
-rw-r--r--spec/lib/gitlab/database/background_migration/batched_job_spec.rb21
-rw-r--r--spec/lib/gitlab/database/background_migration/batched_migration_runner_spec.rb12
-rw-r--r--spec/lib/gitlab/database/background_migration/batched_migration_spec.rb45
-rw-r--r--spec/lib/gitlab/database/background_migration/batched_migration_wrapper_spec.rb16
-rw-r--r--spec/lib/gitlab/database/background_migration/health_status/indicators/autovacuum_active_on_table_spec.rb2
-rw-r--r--spec/lib/gitlab/database/background_migration/health_status/indicators/write_ahead_log_spec.rb61
-rw-r--r--spec/lib/gitlab/database/background_migration/health_status_spec.rb45
-rw-r--r--spec/lib/gitlab/database/bulk_update_spec.rb3
-rw-r--r--spec/lib/gitlab/database/load_balancing/load_balancer_spec.rb10
-rw-r--r--spec/lib/gitlab/database/load_balancing/rack_middleware_spec.rb2
-rw-r--r--spec/lib/gitlab/database/load_balancing/session_spec.rb6
-rw-r--r--spec/lib/gitlab/database/load_balancing/sidekiq_server_middleware_spec.rb2
-rw-r--r--spec/lib/gitlab/database/load_balancing/sticking_spec.rb2
-rw-r--r--spec/lib/gitlab/database/load_balancing_spec.rb2
-rw-r--r--spec/lib/gitlab/database/lock_writes_manager_spec.rb123
-rw-r--r--spec/lib/gitlab/database/loose_foreign_keys_spec.rb28
-rw-r--r--spec/lib/gitlab/database/migration_helpers/restrict_gitlab_schema_spec.rb7
-rw-r--r--spec/lib/gitlab/database/migration_helpers/v2_spec.rb10
-rw-r--r--spec/lib/gitlab/database/migration_helpers_spec.rb82
-rw-r--r--spec/lib/gitlab/database/migrations/background_migration_helpers_spec.rb6
-rw-r--r--spec/lib/gitlab/database/migrations/batched_background_migration_helpers_spec.rb117
-rw-r--r--spec/lib/gitlab/database/migrations/instrumentation_spec.rb6
-rw-r--r--spec/lib/gitlab/database/migrations/lock_retry_mixin_spec.rb12
-rw-r--r--spec/lib/gitlab/database/migrations/runner_spec.rb2
-rw-r--r--spec/lib/gitlab/database/migrations/test_batched_background_runner_spec.rb15
-rw-r--r--spec/lib/gitlab/database/partitioning/sliding_list_strategy_spec.rb72
-rw-r--r--spec/lib/gitlab/database/partitioning_spec.rb2
-rw-r--r--spec/lib/gitlab/database/query_analyzers/gitlab_schemas_validate_connection_spec.rb25
-rw-r--r--spec/lib/gitlab/database/reindexing/grafana_notifier_spec.rb2
-rw-r--r--spec/lib/gitlab/database/reindexing_spec.rb21
-rw-r--r--spec/lib/gitlab/database/shared_model_spec.rb2
-rw-r--r--spec/lib/gitlab/database/with_lock_retries_outside_transaction_spec.rb4
-rw-r--r--spec/lib/gitlab/database/with_lock_retries_spec.rb8
-rw-r--r--spec/lib/gitlab/database_importers/common_metrics/importer_spec.rb2
-rw-r--r--spec/lib/gitlab/diff/highlight_cache_spec.rb14
-rw-r--r--spec/lib/gitlab/diff/highlight_spec.rb2
-rw-r--r--spec/lib/gitlab/diff/rendered/notebook/diff_file_helper_spec.rb2
-rw-r--r--spec/lib/gitlab/doorkeeper_secret_storing/pbkdf2_sha512_spec.rb36
-rw-r--r--spec/lib/gitlab/email/handler/create_note_handler_spec.rb4
-rw-r--r--spec/lib/gitlab/email/handler/service_desk_handler_spec.rb20
-rw-r--r--spec/lib/gitlab/email/message/in_product_marketing/admin_verify_spec.rb2
-rw-r--r--spec/lib/gitlab/email/message/in_product_marketing/create_spec.rb2
-rw-r--r--spec/lib/gitlab/email/message/in_product_marketing/team_short_spec.rb2
-rw-r--r--spec/lib/gitlab/email/message/in_product_marketing/team_spec.rb2
-rw-r--r--spec/lib/gitlab/email/message/in_product_marketing/trial_short_spec.rb2
-rw-r--r--spec/lib/gitlab/email/message/in_product_marketing/trial_spec.rb2
-rw-r--r--spec/lib/gitlab/email/message/in_product_marketing/verify_spec.rb2
-rw-r--r--spec/lib/gitlab/error_tracking/error_repository/open_api_strategy_spec.rb2
-rw-r--r--spec/lib/gitlab/error_tracking/logger_spec.rb2
-rw-r--r--spec/lib/gitlab/error_tracking/processor/sidekiq_processor_spec.rb20
-rw-r--r--spec/lib/gitlab/exclusive_lease_helpers/sleeping_lock_spec.rb22
-rw-r--r--spec/lib/gitlab/exclusive_lease_helpers_spec.rb4
-rw-r--r--spec/lib/gitlab/file_markdown_link_builder_spec.rb2
-rw-r--r--spec/lib/gitlab/form_builders/gitlab_ui_form_builder_spec.rb79
-rw-r--r--spec/lib/gitlab/git/blame_spec.rb2
-rw-r--r--spec/lib/gitlab/git/blob_spec.rb3
-rw-r--r--spec/lib/gitlab/git/branch_spec.rb23
-rw-r--r--spec/lib/gitlab/git/commit_spec.rb10
-rw-r--r--spec/lib/gitlab/git/diff_collection_spec.rb4
-rw-r--r--spec/lib/gitlab/git/raw_diff_change_spec.rb2
-rw-r--r--spec/lib/gitlab/git/remote_repository_spec.rb61
-rw-r--r--spec/lib/gitlab/git/repository_spec.rb73
-rw-r--r--spec/lib/gitlab/git/rugged_impl/use_rugged_spec.rb58
-rw-r--r--spec/lib/gitlab/git/tag_spec.rb37
-rw-r--r--spec/lib/gitlab/git/tree_spec.rb9
-rw-r--r--spec/lib/gitlab/git_access_spec.rb38
-rw-r--r--spec/lib/gitlab/git_spec.rb1
-rw-r--r--spec/lib/gitlab/gitaly_client/commit_service_spec.rb2
-rw-r--r--spec/lib/gitlab/gitaly_client/operation_service_spec.rb208
-rw-r--r--spec/lib/gitlab/gitaly_client/ref_service_spec.rb24
-rw-r--r--spec/lib/gitlab/gitaly_client/repository_service_spec.rb42
-rw-r--r--spec/lib/gitlab/github_import/client_spec.rb2
-rw-r--r--spec/lib/gitlab/github_import/importer/events/base_importer_spec.rb15
-rw-r--r--spec/lib/gitlab/github_import/importer/events/changed_assignee_spec.rb95
-rw-r--r--spec/lib/gitlab/github_import/importer/events/changed_label_spec.rb14
-rw-r--r--spec/lib/gitlab/github_import/importer/events/changed_milestone_spec.rb72
-rw-r--r--spec/lib/gitlab/github_import/importer/events/closed_spec.rb16
-rw-r--r--spec/lib/gitlab/github_import/importer/events/cross_referenced_spec.rb40
-rw-r--r--spec/lib/gitlab/github_import/importer/events/renamed_spec.rb16
-rw-r--r--spec/lib/gitlab/github_import/importer/events/reopened_spec.rb16
-rw-r--r--spec/lib/gitlab/github_import/importer/issue_event_importer_spec.rb36
-rw-r--r--spec/lib/gitlab/github_import/importer/issue_events_importer_spec.rb122
-rw-r--r--spec/lib/gitlab/github_import/importer/issue_importer_spec.rb22
-rw-r--r--spec/lib/gitlab/github_import/importer/lfs_objects_importer_spec.rb2
-rw-r--r--spec/lib/gitlab/github_import/importer/pull_requests_importer_spec.rb2
-rw-r--r--spec/lib/gitlab/github_import/importer/single_endpoint_issue_events_importer_spec.rb4
-rw-r--r--spec/lib/gitlab/github_import/issuable_finder_spec.rb2
-rw-r--r--spec/lib/gitlab/github_import/parallel_scheduling_spec.rb4
-rw-r--r--spec/lib/gitlab/github_import/representation/issue_event_spec.rb81
-rw-r--r--spec/lib/gitlab/github_import/representation/issue_spec.rb15
-rw-r--r--spec/lib/gitlab/github_import/user_finder_spec.rb66
-rw-r--r--spec/lib/gitlab/global_id/deprecations_spec.rb15
-rw-r--r--spec/lib/gitlab/gpg_spec.rb4
-rw-r--r--spec/lib/gitlab/grape_logging/loggers/token_logger_spec.rb34
-rw-r--r--spec/lib/gitlab/graphql/deprecation_spec.rb69
-rw-r--r--spec/lib/gitlab/graphql/pagination/keyset/conditions/not_null_condition_spec.rb115
-rw-r--r--spec/lib/gitlab/graphql/pagination/keyset/conditions/null_condition_spec.rb95
-rw-r--r--spec/lib/gitlab/graphql/pagination/keyset/connection_generic_keyset_spec.rb415
-rw-r--r--spec/lib/gitlab/graphql/pagination/keyset/connection_spec.rb595
-rw-r--r--spec/lib/gitlab/graphql/pagination/keyset/order_info_spec.rb118
-rw-r--r--spec/lib/gitlab/graphql/pagination/keyset/query_builder_spec.rb135
-rw-r--r--spec/lib/gitlab/graphql/type_name_deprecations_spec.rb52
-rw-r--r--spec/lib/gitlab/graphs/commits_spec.rb2
-rw-r--r--spec/lib/gitlab/highlight_spec.rb2
-rw-r--r--spec/lib/gitlab/hook_data/group_builder_spec.rb3
-rw-r--r--spec/lib/gitlab/hook_data/group_member_builder_spec.rb3
-rw-r--r--spec/lib/gitlab/hook_data/key_builder_spec.rb4
-rw-r--r--spec/lib/gitlab/hook_data/merge_request_builder_spec.rb2
-rw-r--r--spec/lib/gitlab/hook_data/project_builder_spec.rb4
-rw-r--r--spec/lib/gitlab/hook_data/project_member_builder_spec.rb3
-rw-r--r--spec/lib/gitlab/hook_data/subgroup_builder_spec.rb2
-rw-r--r--spec/lib/gitlab/hook_data/user_builder_spec.rb4
-rw-r--r--spec/lib/gitlab/http_io_spec.rb2
-rw-r--r--spec/lib/gitlab/import_export/after_export_strategies/web_upload_strategy_spec.rb168
-rw-r--r--spec/lib/gitlab/import_export/all_models.yml16
-rw-r--r--spec/lib/gitlab/import_export/base/relation_factory_spec.rb24
-rw-r--r--spec/lib/gitlab/import_export/base/relation_object_saver_spec.rb6
-rw-r--r--spec/lib/gitlab/import_export/decompressed_archive_size_validator_spec.rb5
-rw-r--r--spec/lib/gitlab/import_export/group/tree_restorer_spec.rb16
-rw-r--r--spec/lib/gitlab/import_export/import_test_coverage_spec.rb6
-rw-r--r--spec/lib/gitlab/import_export/json/ndjson_writer_spec.rb2
-rw-r--r--spec/lib/gitlab/import_export/json/streaming_serializer_spec.rb48
-rw-r--r--spec/lib/gitlab/import_export/log_util_spec.rb43
-rw-r--r--spec/lib/gitlab/import_export/project/relation_saver_spec.rb125
-rw-r--r--spec/lib/gitlab/import_export/project/tree_restorer_spec.rb49
-rw-r--r--spec/lib/gitlab/import_export/project/tree_saver_spec.rb27
-rw-r--r--spec/lib/gitlab/import_export/remote_stream_upload_spec.rb232
-rw-r--r--spec/lib/gitlab/import_export/safe_model_attributes.yml16
-rw-r--r--spec/lib/gitlab/import_export/shared_spec.rb10
-rw-r--r--spec/lib/gitlab/import_export/version_checker_spec.rb2
-rw-r--r--spec/lib/gitlab/instrumentation_helper_spec.rb22
-rw-r--r--spec/lib/gitlab/jira/dvcs_spec.rb6
-rw-r--r--spec/lib/gitlab/jira_import/issues_importer_spec.rb43
-rw-r--r--spec/lib/gitlab/kubernetes/rollout_status_spec.rb2
-rw-r--r--spec/lib/gitlab/mail_room/mail_room_spec.rb4
-rw-r--r--spec/lib/gitlab/memory/jemalloc_spec.rb25
-rw-r--r--spec/lib/gitlab/memory/reports/jemalloc_stats_spec.rb124
-rw-r--r--spec/lib/gitlab/memory/reports_daemon_spec.rb136
-rw-r--r--spec/lib/gitlab/memory/watchdog_spec.rb139
-rw-r--r--spec/lib/gitlab/metrics/background_transaction_spec.rb2
-rw-r--r--spec/lib/gitlab/metrics/web_transaction_spec.rb2
-rw-r--r--spec/lib/gitlab/middleware/compressed_json_spec.rb6
-rw-r--r--spec/lib/gitlab/middleware/sidekiq_web_static_spec.rb2
-rw-r--r--spec/lib/gitlab/octokit/middleware_spec.rb2
-rw-r--r--spec/lib/gitlab/otp_key_rotator_spec.rb2
-rw-r--r--spec/lib/gitlab/pagination/gitaly_keyset_pager_spec.rb14
-rw-r--r--spec/lib/gitlab/pagination/keyset_spec.rb2
-rw-r--r--spec/lib/gitlab/phabricator_import/conduit/response_spec.rb2
-rw-r--r--spec/lib/gitlab/prometheus_client_spec.rb2
-rw-r--r--spec/lib/gitlab/quick_actions/extractor_spec.rb8
-rw-r--r--spec/lib/gitlab/rack_attack/instrumented_cache_store_spec.rb2
-rw-r--r--spec/lib/gitlab/rack_attack/user_allowlist_spec.rb2
-rw-r--r--spec/lib/gitlab/redis/cache_spec.rb12
-rw-r--r--spec/lib/gitlab/redis/hll_spec.rb8
-rw-r--r--spec/lib/gitlab/redis/multi_store_spec.rb24
-rw-r--r--spec/lib/gitlab/reference_counter_spec.rb2
-rw-r--r--spec/lib/gitlab/regex_spec.rb8
-rw-r--r--spec/lib/gitlab/search/abuse_detection_spec.rb2
-rw-r--r--spec/lib/gitlab/search_context/builder_spec.rb2
-rw-r--r--spec/lib/gitlab/seeder_spec.rb40
-rw-r--r--spec/lib/gitlab/session_spec.rb2
-rw-r--r--spec/lib/gitlab/sidekiq_config_spec.rb2
-rw-r--r--spec/lib/gitlab/sidekiq_daemon/memory_killer_spec.rb6
-rw-r--r--spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb16
-rw-r--r--spec/lib/gitlab/sidekiq_middleware/monitor_spec.rb4
-rw-r--r--spec/lib/gitlab/sidekiq_middleware/server_metrics_spec.rb1
-rw-r--r--spec/lib/gitlab/slash_commands/deploy_spec.rb2
-rw-r--r--spec/lib/gitlab/spamcheck/client_spec.rb6
-rw-r--r--spec/lib/gitlab/ssh/commit_spec.rb82
-rw-r--r--spec/lib/gitlab/suggestions/file_suggestion_spec.rb2
-rw-r--r--spec/lib/gitlab/tracking/destinations/snowplow_micro_spec.rb36
-rw-r--r--spec/lib/gitlab/tracking_spec.rb37
-rw-r--r--spec/lib/gitlab/usage/metrics/instrumentations/database_metric_spec.rb32
-rw-r--r--spec/lib/gitlab/usage/metrics/name_suggestion_spec.rb4
-rw-r--r--spec/lib/gitlab/usage/metrics/names_suggestions/generator_spec.rb2
-rw-r--r--spec/lib/gitlab/usage/service_ping_report_spec.rb8
-rw-r--r--spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb43
-rw-r--r--spec/lib/gitlab/usage_data_counters/ipynb_diff_activity_counter_spec.rb36
-rw-r--r--spec/lib/gitlab/usage_data_counters/issue_activity_unique_counter_spec.rb21
-rw-r--r--spec/lib/gitlab/usage_data_counters/merge_request_widget_extension_counter_spec.rb9
-rw-r--r--spec/lib/gitlab/usage_data_counters/work_item_activity_unique_counter_spec.rb8
-rw-r--r--spec/lib/gitlab/usage_data_spec.rb6
-rw-r--r--spec/lib/gitlab/utils/batch_loader_spec.rb82
-rw-r--r--spec/lib/gitlab/utils/link_header_parser_spec.rb75
-rw-r--r--spec/lib/gitlab/utils/sanitize_node_link_spec.rb2
-rw-r--r--spec/lib/gitlab/utils/strong_memoize_spec.rb139
-rw-r--r--spec/lib/gitlab/utils/usage_data_spec.rb2
-rw-r--r--spec/lib/gitlab/utils_spec.rb4
-rw-r--r--spec/lib/gitlab/verify/uploads_spec.rb2
-rw-r--r--spec/lib/gitlab/version_info_spec.rb19
-rw-r--r--spec/lib/google_api/cloud_platform/client_spec.rb117
-rw-r--r--spec/lib/json_web_token/rsa_token_spec.rb5
-rw-r--r--spec/lib/marginalia_spec.rb18
-rw-r--r--spec/lib/mattermost/session_spec.rb2
-rw-r--r--spec/lib/microsoft_teams/notifier_spec.rb2
-rw-r--r--spec/lib/release_highlights/validator/entry_spec.rb20
-rw-r--r--spec/lib/release_highlights/validator_spec.rb2
-rw-r--r--spec/lib/security/report_schema_version_matcher_spec.rb44
-rw-r--r--spec/lib/sidebars/groups/menus/group_information_menu_spec.rb8
-rw-r--r--spec/lib/sidebars/menu_spec.rb2
-rw-r--r--spec/lib/tasks/gitlab/metrics_exporter_task_spec.rb2
-rw-r--r--spec/lib/unnested_in_filters/rewriter_spec.rb29
-rw-r--r--spec/mailers/emails/admin_notification_spec.rb64
-rw-r--r--spec/mailers/emails/profile_spec.rb3
-rw-r--r--spec/migrations/20210421163509_schedule_update_jira_tracker_data_deployment_type_based_on_url_spec.rb48
-rw-r--r--spec/migrations/2021061716138_cascade_delete_freeze_periods_spec.rb2
-rw-r--r--spec/migrations/20210818185845_backfill_projects_with_coverage_spec.rb2
-rw-r--r--spec/migrations/20211116111644_schedule_remove_occurrence_pipelines_and_duplicate_vulnerabilities_findings_spec.rb3
-rw-r--r--spec/migrations/20220124130028_dedup_runner_projects_spec.rb2
-rw-r--r--spec/migrations/20220128155251_remove_dangling_running_builds_spec.rb4
-rw-r--r--spec/migrations/20220505174658_update_index_on_alerts_to_exclude_null_fingerprints_spec.rb2
-rw-r--r--spec/migrations/20220506154054_create_sync_namespace_details_trigger_spec.rb76
-rw-r--r--spec/migrations/20220524184149_create_sync_project_namespace_details_trigger_spec.rb73
-rw-r--r--spec/migrations/20220525221133_schedule_backfill_vulnerability_reads_cluster_agent_spec.rb24
-rw-r--r--spec/migrations/20220607082910_add_sync_tmp_index_for_potentially_misassociated_vulnerability_occurrences_spec.rb22
-rw-r--r--spec/migrations/20220721031446_schedule_disable_legacy_open_source_license_for_one_member_no_repo_projects_spec.rb64
-rw-r--r--spec/migrations/20220722084543_schedule_disable_legacy_open_source_license_for_no_issues_no_repo_projects_spec.rb (renamed from spec/migrations/20220520040416_schedule_set_legacy_open_source_license_available_for_non_public_projects_spec.rb)7
-rw-r--r--spec/migrations/20220722110026_reschedule_set_legacy_open_source_license_available_for_non_public_projects_spec.rb64
-rw-r--r--spec/migrations/20220725150127_update_jira_tracker_data_deployment_type_based_on_url_spec.rb58
-rw-r--r--spec/migrations/20220802114351_reschedule_backfill_container_registry_size_into_project_statistics_spec.rb41
-rw-r--r--spec/migrations/20220802204737_remove_deactivated_user_highest_role_stats_spec.rb39
-rw-r--r--spec/migrations/associate_existing_dast_builds_with_variables_spec.rb70
-rw-r--r--spec/migrations/backfill_project_import_level_spec.rb29
-rw-r--r--spec/migrations/change_public_projects_cost_factor_spec.rb11
-rw-r--r--spec/migrations/clean_up_pending_builds_table_spec.rb2
-rw-r--r--spec/migrations/cleanup_mr_attention_request_todos_spec.rb46
-rw-r--r--spec/migrations/delete_security_findings_without_uuid_spec.rb2
-rw-r--r--spec/migrations/disable_job_token_scope_when_unused_spec.rb38
-rw-r--r--spec/migrations/migrate_protected_attribute_to_pending_builds_spec.rb2
-rw-r--r--spec/migrations/re_schedule_latest_pipeline_id_population_with_all_security_related_artifact_types_spec.rb3
-rw-r--r--spec/migrations/schedule_backfilling_the_namespace_id_for_vulnerability_reads_spec.rb32
-rw-r--r--spec/migrations/schedule_copy_ci_builds_columns_to_security_scans2_spec.rb44
-rw-r--r--spec/migrations/schedule_migrate_shared_vulnerability_scanners_spec.rb59
-rw-r--r--spec/migrations/schedule_populate_status_column_of_security_scans_spec.rb48
-rw-r--r--spec/migrations/start_backfill_ci_queuing_tables_spec.rb2
-rw-r--r--spec/models/active_session_spec.rb2
-rw-r--r--spec/models/application_setting_spec.rb7
-rw-r--r--spec/models/aws/role_spec.rb2
-rw-r--r--spec/models/board_spec.rb4
-rw-r--r--spec/models/chat_name_spec.rb4
-rw-r--r--spec/models/ci/bridge_spec.rb14
-rw-r--r--spec/models/ci/build_dependencies_spec.rb4
-rw-r--r--spec/models/ci/build_metadata_spec.rb13
-rw-r--r--spec/models/ci/build_runner_session_spec.rb2
-rw-r--r--spec/models/ci/build_spec.rb249
-rw-r--r--spec/models/ci/daily_build_group_report_result_spec.rb2
-rw-r--r--spec/models/ci/job_artifact_spec.rb14
-rw-r--r--spec/models/ci/pipeline_schedule_spec.rb2
-rw-r--r--spec/models/ci/pipeline_spec.rb311
-rw-r--r--spec/models/ci/processable_spec.rb2
-rw-r--r--spec/models/ci/runner_spec.rb65
-rw-r--r--spec/models/ci/runner_version_spec.rb9
-rw-r--r--spec/models/ci/secure_file_spec.rb1
-rw-r--r--spec/models/clusters/cluster_spec.rb4
-rw-r--r--spec/models/commit_signatures/ssh_signature_spec.rb2
-rw-r--r--spec/models/commit_status_spec.rb2
-rw-r--r--spec/models/concerns/bulk_insert_safe_spec.rb4
-rw-r--r--spec/models/concerns/chronic_duration_attribute_spec.rb12
-rw-r--r--spec/models/concerns/ci/artifactable_spec.rb24
-rw-r--r--spec/models/concerns/counter_attribute_spec.rb2
-rw-r--r--spec/models/concerns/cross_database_modification_spec.rb32
-rw-r--r--spec/models/concerns/database_event_tracking_spec.rb69
-rw-r--r--spec/models/concerns/expirable_spec.rb5
-rw-r--r--spec/models/concerns/issuable_spec.rb21
-rw-r--r--spec/models/concerns/nullify_if_blank_spec.rb2
-rw-r--r--spec/models/concerns/participable_spec.rb26
-rw-r--r--spec/models/concerns/project_features_compatibility_spec.rb6
-rw-r--r--spec/models/concerns/reactive_caching_spec.rb2
-rw-r--r--spec/models/concerns/token_authenticatable_spec.rb12
-rw-r--r--spec/models/container_expiration_policy_spec.rb2
-rw-r--r--spec/models/container_repository_spec.rb184
-rw-r--r--spec/models/customer_relations/contact_spec.rb53
-rw-r--r--spec/models/customer_relations/contact_state_counts_spec.rb60
-rw-r--r--spec/models/data_list_spec.rb2
-rw-r--r--spec/models/deploy_key_spec.rb9
-rw-r--r--spec/models/design_management/version_spec.rb8
-rw-r--r--spec/models/environment_spec.rb231
-rw-r--r--spec/models/error_tracking/project_error_tracking_setting_spec.rb46
-rw-r--r--spec/models/event_spec.rb26
-rw-r--r--spec/models/group_group_link_spec.rb52
-rw-r--r--spec/models/group_spec.rb44
-rw-r--r--spec/models/hooks/system_hook_spec.rb2
-rw-r--r--spec/models/hooks/web_hook_spec.rb12
-rw-r--r--spec/models/incident_management/issuable_escalation_status_spec.rb1
-rw-r--r--spec/models/incident_management/timeline_event_spec.rb15
-rw-r--r--spec/models/integration_spec.rb5
-rw-r--r--spec/models/integrations/bamboo_spec.rb1
-rw-r--r--spec/models/integrations/bugzilla_spec.rb1
-rw-r--r--spec/models/integrations/buildkite_spec.rb1
-rw-r--r--spec/models/integrations/campfire_spec.rb2
-rw-r--r--spec/models/integrations/chat_message/issue_message_spec.rb4
-rw-r--r--spec/models/integrations/chat_message/wiki_page_message_spec.rb2
-rw-r--r--spec/models/integrations/custom_issue_tracker_spec.rb1
-rw-r--r--spec/models/integrations/datadog_spec.rb16
-rw-r--r--spec/models/integrations/drone_ci_spec.rb1
-rw-r--r--spec/models/integrations/ewm_spec.rb1
-rw-r--r--spec/models/integrations/external_wiki_spec.rb1
-rw-r--r--spec/models/integrations/harbor_spec.rb4
-rw-r--r--spec/models/integrations/jira_spec.rb22
-rw-r--r--spec/models/integrations/microsoft_teams_spec.rb1
-rw-r--r--spec/models/integrations/pumble_spec.rb14
-rw-r--r--spec/models/integrations/slack_spec.rb17
-rw-r--r--spec/models/integrations/teamcity_spec.rb1
-rw-r--r--spec/models/issue_spec.rb179
-rw-r--r--spec/models/jira_import_state_spec.rb30
-rw-r--r--spec/models/lfs_object_spec.rb4
-rw-r--r--spec/models/loose_foreign_keys/deleted_record_spec.rb16
-rw-r--r--spec/models/member_spec.rb31
-rw-r--r--spec/models/members/group_member_spec.rb90
-rw-r--r--spec/models/members/member_role_spec.rb17
-rw-r--r--spec/models/members/project_member_spec.rb9
-rw-r--r--spec/models/merge_request/approval_removal_settings_spec.rb52
-rw-r--r--spec/models/merge_request_spec.rb107
-rw-r--r--spec/models/milestone_spec.rb4
-rw-r--r--spec/models/ml/candidate_metric_spec.rb9
-rw-r--r--spec/models/ml/candidate_param_spec.rb9
-rw-r--r--spec/models/ml/candidate_spec.rb12
-rw-r--r--spec/models/ml/experiment_spec.rb11
-rw-r--r--spec/models/namespace/detail_spec.rb40
-rw-r--r--spec/models/namespace/root_storage_statistics_spec.rb4
-rw-r--r--spec/models/namespace/traversal_hierarchy_spec.rb6
-rw-r--r--spec/models/namespace_spec.rb50
-rw-r--r--spec/models/namespaces/project_namespace_spec.rb8
-rw-r--r--spec/models/note_spec.rb74
-rw-r--r--spec/models/oauth_access_token_spec.rb68
-rw-r--r--spec/models/onboarding_progress_spec.rb2
-rw-r--r--spec/models/packages/cleanup/policy_spec.rb1
-rw-r--r--spec/models/packages/conan/metadatum_spec.rb2
-rw-r--r--spec/models/packages/package_file_spec.rb6
-rw-r--r--spec/models/packages/package_spec.rb6
-rw-r--r--spec/models/personal_access_token_spec.rb24
-rw-r--r--spec/models/postgresql/replication_slot_spec.rb2
-rw-r--r--spec/models/preloaders/labels_preloader_spec.rb2
-rw-r--r--spec/models/preloaders/user_max_access_level_in_groups_preloader_spec.rb2
-rw-r--r--spec/models/project_spec.rb115
-rw-r--r--spec/models/project_statistics_spec.rb39
-rw-r--r--spec/models/projects/import_export/relation_export_spec.rb32
-rw-r--r--spec/models/projects/topic_spec.rb11
-rw-r--r--spec/models/protected_branch_spec.rb124
-rw-r--r--spec/models/release_highlight_spec.rb14
-rw-r--r--spec/models/release_spec.rb2
-rw-r--r--spec/models/releases/link_spec.rb2
-rw-r--r--spec/models/remote_mirror_spec.rb16
-rw-r--r--spec/models/repository_spec.rb40
-rw-r--r--spec/models/snippet_input_action_collection_spec.rb6
-rw-r--r--spec/models/snippet_spec.rb4
-rw-r--r--spec/models/u2f_registration_spec.rb106
-rw-r--r--spec/models/user_spec.rb444
-rw-r--r--spec/models/user_status_spec.rb26
-rw-r--r--spec/models/users/calloutable_spec.rb4
-rw-r--r--spec/models/users/in_product_marketing_email_spec.rb3
-rw-r--r--spec/models/users/project_callout_spec.rb24
-rw-r--r--spec/models/webauthn_registration_spec.rb1
-rw-r--r--spec/models/wiki_page/meta_spec.rb2
-rw-r--r--spec/models/work_item_spec.rb67
-rw-r--r--spec/models/work_items/parent_link_spec.rb64
-rw-r--r--spec/models/work_items/type_spec.rb11
-rw-r--r--spec/models/work_items/widgets/hierarchy_spec.rb6
-rw-r--r--spec/models/work_items/widgets/labels_spec.rb31
-rw-r--r--spec/models/work_items/widgets/start_and_due_date_spec.rb31
-rw-r--r--spec/policies/group_policy_spec.rb29
-rw-r--r--spec/policies/issuable_policy_spec.rb40
-rw-r--r--spec/policies/issue_policy_spec.rb55
-rw-r--r--spec/policies/namespaces/project_namespace_policy_spec.rb42
-rw-r--r--spec/policies/namespaces/user_namespace_policy_spec.rb2
-rw-r--r--spec/policies/project_hook_policy_spec.rb31
-rw-r--r--spec/policies/project_policy_spec.rb275
-rw-r--r--spec/policies/system_hook_policy_spec.rb29
-rw-r--r--spec/policies/timelog_policy_spec.rb2
-rw-r--r--spec/policies/upload_policy_spec.rb76
-rw-r--r--spec/policies/work_item_policy_spec.rb41
-rw-r--r--spec/presenters/alert_management/alert_presenter_spec.rb2
-rw-r--r--spec/presenters/ci/build_runner_presenter_spec.rb61
-rw-r--r--spec/presenters/merge_request_presenter_spec.rb25
-rw-r--r--spec/presenters/packages/pypi/simple_package_versions_presenter_spec.rb2
-rw-r--r--spec/presenters/project_hook_presenter_spec.rb4
-rw-r--r--spec/presenters/project_member_presenter_spec.rb118
-rw-r--r--spec/requests/admin/broadcast_messages_controller_spec.rb18
-rw-r--r--spec/requests/admin/integrations_controller_spec.rb1
-rw-r--r--spec/requests/api/api_spec.rb50
-rw-r--r--spec/requests/api/boards_spec.rb6
-rw-r--r--spec/requests/api/branches_spec.rb5
-rw-r--r--spec/requests/api/bulk_imports_spec.rb93
-rw-r--r--spec/requests/api/ci/jobs_spec.rb2
-rw-r--r--spec/requests/api/ci/pipeline_schedules_spec.rb106
-rw-r--r--spec/requests/api/ci/runner/jobs_request_post_spec.rb2
-rw-r--r--spec/requests/api/ci/runner/jobs_trace_spec.rb4
-rw-r--r--spec/requests/api/ci/runner/runners_post_spec.rb9
-rw-r--r--spec/requests/api/ci/secure_files_spec.rb6
-rw-r--r--spec/requests/api/ci/triggers_spec.rb8
-rw-r--r--spec/requests/api/ci/variables_spec.rb10
-rw-r--r--spec/requests/api/clusters/agents_spec.rb4
-rw-r--r--spec/requests/api/commits_spec.rb107
-rw-r--r--spec/requests/api/conan_instance_packages_spec.rb2
-rw-r--r--spec/requests/api/conan_project_packages_spec.rb2
-rw-r--r--spec/requests/api/dependency_proxy_spec.rb2
-rw-r--r--spec/requests/api/deployments_spec.rb84
-rw-r--r--spec/requests/api/doorkeeper_access_spec.rb6
-rw-r--r--spec/requests/api/go_proxy_spec.rb8
-rw-r--r--spec/requests/api/graphql/boards/board_list_issues_query_spec.rb2
-rw-r--r--spec/requests/api/graphql/boards/board_lists_query_spec.rb2
-rw-r--r--spec/requests/api/graphql/ci/instance_variables_spec.rb12
-rw-r--r--spec/requests/api/graphql/ci/manual_variables_spec.rb8
-rw-r--r--spec/requests/api/graphql/ci/pipelines_spec.rb29
-rw-r--r--spec/requests/api/graphql/ci/runners_spec.rb4
-rw-r--r--spec/requests/api/graphql/crm/contacts_spec.rb24
-rw-r--r--spec/requests/api/graphql/current_user/groups_query_spec.rb19
-rw-r--r--spec/requests/api/graphql/custom_emoji_query_spec.rb4
-rw-r--r--spec/requests/api/graphql/group/group_members_spec.rb18
-rw-r--r--spec/requests/api/graphql/group_query_spec.rb81
-rw-r--r--spec/requests/api/graphql/mutations/award_emojis/add_spec.rb2
-rw-r--r--spec/requests/api/graphql/mutations/award_emojis/toggle_spec.rb2
-rw-r--r--spec/requests/api/graphql/mutations/boards/destroy_spec.rb11
-rw-r--r--spec/requests/api/graphql/mutations/ci/job_retry_spec.rb32
-rw-r--r--spec/requests/api/graphql/mutations/ci/pipeline_cancel_spec.rb2
-rw-r--r--spec/requests/api/graphql/mutations/merge_requests/request_attention_spec.rb79
-rw-r--r--spec/requests/api/graphql/mutations/merge_requests/set_reviewers_spec.rb106
-rw-r--r--spec/requests/api/graphql/mutations/merge_requests/update_reviewer_state_spec.rb65
-rw-r--r--spec/requests/api/graphql/mutations/notes/create/note_spec.rb24
-rw-r--r--spec/requests/api/graphql/mutations/releases/create_spec.rb6
-rw-r--r--spec/requests/api/graphql/mutations/releases/update_spec.rb2
-rw-r--r--spec/requests/api/graphql/mutations/remove_attention_request_spec.rb79
-rw-r--r--spec/requests/api/graphql/mutations/snippets/create_spec.rb6
-rw-r--r--spec/requests/api/graphql/mutations/timelogs/create_spec.rb48
-rw-r--r--spec/requests/api/graphql/mutations/timelogs/delete_spec.rb2
-rw-r--r--spec/requests/api/graphql/mutations/uploads/delete_spec.rb74
-rw-r--r--spec/requests/api/graphql/mutations/work_items/create_from_task_spec.rb3
-rw-r--r--spec/requests/api/graphql/mutations/work_items/create_spec.rb4
-rw-r--r--spec/requests/api/graphql/mutations/work_items/update_spec.rb223
-rw-r--r--spec/requests/api/graphql/namespace/root_storage_statistics_spec.rb2
-rw-r--r--spec/requests/api/graphql/packages/conan_spec.rb2
-rw-r--r--spec/requests/api/graphql/packages/helm_spec.rb2
-rw-r--r--spec/requests/api/graphql/packages/package_spec.rb2
-rw-r--r--spec/requests/api/graphql/project/base_service_spec.rb2
-rw-r--r--spec/requests/api/graphql/project/error_tracking/sentry_detailed_error_request_spec.rb6
-rw-r--r--spec/requests/api/graphql/project/fork_targets_spec.rb69
-rw-r--r--spec/requests/api/graphql/project/jira_import_spec.rb14
-rw-r--r--spec/requests/api/graphql/project/project_members_spec.rb18
-rw-r--r--spec/requests/api/graphql/project/work_items_spec.rb2
-rw-r--r--spec/requests/api/graphql/project_query_spec.rb96
-rw-r--r--spec/requests/api/graphql/work_item_spec.rb93
-rw-r--r--spec/requests/api/graphql_spec.rb53
-rw-r--r--spec/requests/api/group_variables_spec.rb8
-rw-r--r--spec/requests/api/groups_spec.rb2
-rw-r--r--spec/requests/api/helpers_spec.rb6
-rw-r--r--spec/requests/api/integrations_spec.rb1
-rw-r--r--spec/requests/api/internal/base_spec.rb39
-rw-r--r--spec/requests/api/internal/error_tracking_spec.rb2
-rw-r--r--spec/requests/api/internal/kubernetes_spec.rb178
-rw-r--r--spec/requests/api/internal/workhorse_spec.rb1
-rw-r--r--spec/requests/api/invitations_spec.rb2
-rw-r--r--spec/requests/api/issue_links_spec.rb25
-rw-r--r--spec/requests/api/issues/get_project_issues_spec.rb44
-rw-r--r--spec/requests/api/markdown_spec.rb59
-rw-r--r--spec/requests/api/maven_packages_spec.rb35
-rw-r--r--spec/requests/api/members_spec.rb49
-rw-r--r--spec/requests/api/merge_requests_spec.rb125
-rw-r--r--spec/requests/api/metrics/dashboard/annotations_spec.rb2
-rw-r--r--spec/requests/api/notes_spec.rb2
-rw-r--r--spec/requests/api/npm_project_packages_spec.rb14
-rw-r--r--spec/requests/api/nuget_group_packages_spec.rb2
-rw-r--r--spec/requests/api/pages/pages_spec.rb2
-rw-r--r--spec/requests/api/pages_domains_spec.rb4
-rw-r--r--spec/requests/api/personal_access_tokens_spec.rb4
-rw-r--r--spec/requests/api/project_attributes.yml2
-rw-r--r--spec/requests/api/project_import_spec.rb2
-rw-r--r--spec/requests/api/project_packages_spec.rb76
-rw-r--r--spec/requests/api/project_templates_spec.rb4
-rw-r--r--spec/requests/api/projects_spec.rb4
-rw-r--r--spec/requests/api/protected_branches_spec.rb62
-rw-r--r--spec/requests/api/pypi_packages_spec.rb4
-rw-r--r--spec/requests/api/release/links_spec.rb8
-rw-r--r--spec/requests/api/releases_spec.rb8
-rw-r--r--spec/requests/api/repositories_spec.rb26
-rw-r--r--spec/requests/api/search_spec.rb11
-rw-r--r--spec/requests/api/settings_spec.rb2
-rw-r--r--spec/requests/api/snippets_spec.rb10
-rw-r--r--spec/requests/api/topics_spec.rb16
-rw-r--r--spec/requests/api/unleash_spec.rb6
-rw-r--r--spec/requests/api/user_counts_spec.rb15
-rw-r--r--spec/requests/api/users_spec.rb14
-rw-r--r--spec/requests/git_http_spec.rb2
-rw-r--r--spec/requests/groups/milestones_controller_spec.rb2
-rw-r--r--spec/requests/jira_connect/subscriptions_controller_spec.rb4
-rw-r--r--spec/requests/jwt_controller_spec.rb13
-rw-r--r--spec/requests/lfs_http_spec.rb14
-rw-r--r--spec/requests/oauth/tokens_controller_spec.rb2
-rw-r--r--spec/requests/projects/cycle_analytics_events_spec.rb6
-rw-r--r--spec/requests/projects/merge_requests/diffs_spec.rb189
-rw-r--r--spec/requests/projects/merge_requests_discussions_spec.rb275
-rw-r--r--spec/requests/projects/settings/packages_and_registries_controller_spec.rb70
-rw-r--r--spec/requests/rack_attack_global_spec.rb12
-rw-r--r--spec/requests/users/namespace_callouts_spec.rb57
-rw-r--r--spec/requests/users/project_callouts_spec.rb58
-rw-r--r--spec/routing/project_routing_spec.rb16
-rw-r--r--spec/rubocop/code_reuse_helpers_spec.rb60
-rw-r--r--spec/rubocop/cop/code_reuse/worker_spec.rb19
-rw-r--r--spec/rubocop/cop/gemspec/avoid_executing_git_spec.rb30
-rw-r--r--spec/rubocop/cop/gitlab/deprecate_track_redis_hll_event_spec.rb19
-rw-r--r--spec/rubocop/cop/gitlab/mark_used_feature_flags_spec.rb4
-rw-r--r--spec/rubocop/cop/inject_enterprise_edition_module_spec.rb1
-rw-r--r--spec/rubocop/cop_todo_spec.rb124
-rw-r--r--spec/rubocop/formatter/todo_formatter_spec.rb12
-rw-r--r--spec/scripts/changed-feature-flags_spec.rb4
-rw-r--r--spec/scripts/lib/glfm/update_example_snapshots_spec.rb12
-rw-r--r--spec/scripts/trigger-build_spec.rb9
-rw-r--r--spec/serializers/deploy_keys/basic_deploy_key_entity_spec.rb4
-rw-r--r--spec/serializers/deploy_keys/deploy_key_entity_spec.rb4
-rw-r--r--spec/serializers/environment_serializer_spec.rb31
-rw-r--r--spec/serializers/group_access_token_entity_spec.rb57
-rw-r--r--spec/serializers/group_access_token_serializer_spec.rb28
-rw-r--r--spec/serializers/integrations/project_entity_spec.rb1
-rw-r--r--spec/serializers/issue_entity_spec.rb7
-rw-r--r--spec/serializers/merge_request_poll_widget_entity_spec.rb34
-rw-r--r--spec/serializers/merge_request_user_entity_spec.rb11
-rw-r--r--spec/serializers/personal_access_token_entity_spec.rb27
-rw-r--r--spec/serializers/personal_access_token_serializer_spec.rb21
-rw-r--r--spec/serializers/project_access_token_entity_spec.rb61
-rw-r--r--spec/serializers/project_access_token_serializer_spec.rb28
-rw-r--r--spec/services/alert_management/process_prometheus_alert_service_spec.rb1
-rw-r--r--spec/services/audit_events/build_service_spec.rb154
-rw-r--r--spec/services/auto_merge/base_service_spec.rb2
-rw-r--r--spec/services/auto_merge_service_spec.rb4
-rw-r--r--spec/services/branches/create_service_spec.rb142
-rw-r--r--spec/services/bulk_imports/create_service_spec.rb6
-rw-r--r--spec/services/bulk_imports/file_download_service_spec.rb35
-rw-r--r--spec/services/bulk_update_integration_service_spec.rb2
-rw-r--r--spec/services/ci/create_pipeline_service/evaluate_runner_tags_spec.rb2
-rw-r--r--spec/services/ci/create_pipeline_service/parent_child_pipeline_spec.rb2
-rw-r--r--spec/services/ci/create_pipeline_service/rules_spec.rb1028
-rw-r--r--spec/services/ci/create_pipeline_service_spec.rb815
-rw-r--r--spec/services/ci/deployments/destroy_service_spec.rb65
-rw-r--r--spec/services/ci/destroy_pipeline_service_spec.rb20
-rw-r--r--spec/services/ci/job_artifacts/create_service_spec.rb8
-rw-r--r--spec/services/ci/job_artifacts/destroy_batch_service_spec.rb9
-rw-r--r--spec/services/ci/list_config_variables_service_spec.rb4
-rw-r--r--spec/services/ci/parse_dotenv_artifact_service_spec.rb2
-rw-r--r--spec/services/ci/pipeline_processing/atomic_processing_service/status_collection_spec.rb2
-rw-r--r--spec/services/ci/process_build_service_spec.rb2
-rw-r--r--spec/services/ci/register_job_service_spec.rb68
-rw-r--r--spec/services/ci/retry_job_service_spec.rb61
-rw-r--r--spec/services/ci/runners/assign_runner_service_spec.rb18
-rw-r--r--spec/services/ci/runners/bulk_delete_runners_service_spec.rb83
-rw-r--r--spec/services/ci/runners/process_runner_version_update_service_spec.rb80
-rw-r--r--spec/services/ci/runners/reconcile_existing_runner_versions_service_spec.rb77
-rw-r--r--spec/services/ci/runners/register_runner_service_spec.rb150
-rw-r--r--spec/services/ci/runners/reset_registration_token_service_spec.rb13
-rw-r--r--spec/services/ci/runners/unassign_runner_service_spec.rb28
-rw-r--r--spec/services/ci/runners/unregister_runner_service_spec.rb7
-rw-r--r--spec/services/ci/runners/update_runner_service_spec.rb2
-rw-r--r--spec/services/ci/stuck_builds/drop_pending_service_spec.rb4
-rw-r--r--spec/services/ci/stuck_builds/drop_scheduled_service_spec.rb2
-rw-r--r--spec/services/ci/track_failed_build_service_spec.rb56
-rw-r--r--spec/services/ci/update_build_state_service_spec.rb18
-rw-r--r--spec/services/clusters/integrations/create_service_spec.rb2
-rw-r--r--spec/services/clusters/integrations/prometheus_health_check_service_spec.rb1
-rw-r--r--spec/services/clusters/kubernetes/create_or_update_service_account_service_spec.rb2
-rw-r--r--spec/services/database/consistency_check_service_spec.rb53
-rw-r--r--spec/services/deployments/create_for_build_service_spec.rb2
-rw-r--r--spec/services/deployments/update_environment_service_spec.rb12
-rw-r--r--spec/services/design_management/delete_designs_service_spec.rb24
-rw-r--r--spec/services/design_management/generate_image_versions_service_spec.rb2
-rw-r--r--spec/services/git/branch_push_service_spec.rb4
-rw-r--r--spec/services/google_cloud/create_cloudsql_instance_service_spec.rb90
-rw-r--r--spec/services/google_cloud/enable_cloudsql_service_spec.rb39
-rw-r--r--spec/services/google_cloud/get_cloudsql_instances_service_spec.rb62
-rw-r--r--spec/services/google_cloud/setup_cloudsql_instance_service_spec.rb68
-rw-r--r--spec/services/groups/create_service_spec.rb9
-rw-r--r--spec/services/groups/destroy_service_spec.rb15
-rw-r--r--spec/services/groups/merge_requests_count_service_spec.rb2
-rw-r--r--spec/services/groups/open_issues_count_service_spec.rb2
-rw-r--r--spec/services/groups/transfer_service_spec.rb37
-rw-r--r--spec/services/groups/update_service_spec.rb40
-rw-r--r--spec/services/groups/update_statistics_service_spec.rb2
-rw-r--r--spec/services/import/fogbugz_service_spec.rb2
-rw-r--r--spec/services/import/prepare_service_spec.rb66
-rw-r--r--spec/services/import/validate_remote_git_endpoint_service_spec.rb2
-rw-r--r--spec/services/incident_management/timeline_events/create_service_spec.rb83
-rw-r--r--spec/services/incident_management/timeline_events/update_service_spec.rb21
-rw-r--r--spec/services/issuable/common_system_notes_service_spec.rb45
-rw-r--r--spec/services/issues/clone_service_spec.rb26
-rw-r--r--spec/services/issues/create_service_spec.rb12
-rw-r--r--spec/services/issues/move_service_spec.rb12
-rw-r--r--spec/services/issues/prepare_import_csv_service_spec.rb51
-rw-r--r--spec/services/issues/referenced_merge_requests_service_spec.rb2
-rw-r--r--spec/services/issues/update_service_spec.rb58
-rw-r--r--spec/services/jira_import/start_import_service_spec.rb2
-rw-r--r--spec/services/lfs/push_service_spec.rb2
-rw-r--r--spec/services/markdown_content_rewriter_service_spec.rb2
-rw-r--r--spec/services/members/groups/creator_service_spec.rb5
-rw-r--r--spec/services/members/invite_service_spec.rb6
-rw-r--r--spec/services/merge_requests/approval_service_spec.rb112
-rw-r--r--spec/services/merge_requests/bulk_remove_attention_requested_service_spec.rb49
-rw-r--r--spec/services/merge_requests/close_service_spec.rb4
-rw-r--r--spec/services/merge_requests/create_approval_event_service_spec.rb22
-rw-r--r--spec/services/merge_requests/create_pipeline_service_spec.rb10
-rw-r--r--spec/services/merge_requests/execute_approval_hooks_service_spec.rb33
-rw-r--r--spec/services/merge_requests/handle_assignees_change_service_spec.rb8
-rw-r--r--spec/services/merge_requests/mergeability/check_broken_status_service_spec.rb7
-rw-r--r--spec/services/merge_requests/mergeability/check_ci_status_service_spec.rb7
-rw-r--r--spec/services/merge_requests/mergeability/check_discussions_status_service_spec.rb7
-rw-r--r--spec/services/merge_requests/mergeability/check_draft_status_service_spec.rb7
-rw-r--r--spec/services/merge_requests/mergeability/check_open_status_service_spec.rb7
-rw-r--r--spec/services/merge_requests/mergeability/run_checks_service_spec.rb94
-rw-r--r--spec/services/merge_requests/push_options_handler_service_spec.rb22
-rw-r--r--spec/services/merge_requests/refresh_service_spec.rb2
-rw-r--r--spec/services/merge_requests/remove_attention_requested_service_spec.rb183
-rw-r--r--spec/services/merge_requests/request_attention_service_spec.rb220
-rw-r--r--spec/services/merge_requests/toggle_attention_requested_service_spec.rb188
-rw-r--r--spec/services/merge_requests/update_reviewers_service_spec.rb162
-rw-r--r--spec/services/merge_requests/update_service_spec.rb10
-rw-r--r--spec/services/milestones/transfer_service_spec.rb8
-rw-r--r--spec/services/notes/build_service_spec.rb10
-rw-r--r--spec/services/notes/copy_service_spec.rb6
-rw-r--r--spec/services/notes/create_service_spec.rb94
-rw-r--r--spec/services/notes/destroy_service_spec.rb26
-rw-r--r--spec/services/notes/update_service_spec.rb34
-rw-r--r--spec/services/notification_service_spec.rb45
-rw-r--r--spec/services/packages/composer/create_package_service_spec.rb2
-rw-r--r--spec/services/packages/create_dependency_service_spec.rb2
-rw-r--r--spec/services/packages/debian/extract_deb_metadata_service_spec.rb2
-rw-r--r--spec/services/packages/debian/extract_metadata_service_spec.rb7
-rw-r--r--spec/services/packages/debian/parse_debian822_service_spec.rb8
-rw-r--r--spec/services/packages/debian/sign_distribution_service_spec.rb2
-rw-r--r--spec/services/packages/helm/process_file_service_spec.rb2
-rw-r--r--spec/services/packages/npm/create_package_service_spec.rb8
-rw-r--r--spec/services/packages/npm/create_tag_service_spec.rb1
-rw-r--r--spec/services/packages/rubygems/dependency_resolver_service_spec.rb14
-rw-r--r--spec/services/pages/delete_service_spec.rb4
-rw-r--r--spec/services/pages_domains/obtain_lets_encrypt_certificate_service_spec.rb2
-rw-r--r--spec/services/personal_access_tokens/revoke_service_spec.rb1
-rw-r--r--spec/services/projects/after_rename_service_spec.rb9
-rw-r--r--spec/services/projects/alerting/notify_service_spec.rb5
-rw-r--r--spec/services/projects/container_repository/third_party/delete_tags_service_spec.rb2
-rw-r--r--spec/services/projects/create_service_spec.rb39
-rw-r--r--spec/services/projects/enable_deploy_key_service_spec.rb2
-rw-r--r--spec/services/projects/hashed_storage/migrate_repository_service_spec.rb6
-rw-r--r--spec/services/projects/hashed_storage/rollback_repository_service_spec.rb8
-rw-r--r--spec/services/projects/import_export/export_service_spec.rb23
-rw-r--r--spec/services/projects/import_export/relation_export_service_spec.rb121
-rw-r--r--spec/services/projects/lfs_pointers/lfs_download_link_list_service_spec.rb2
-rw-r--r--spec/services/projects/lfs_pointers/lfs_download_service_spec.rb2
-rw-r--r--spec/services/projects/lfs_pointers/lfs_object_download_list_service_spec.rb6
-rw-r--r--spec/services/projects/participants_service_spec.rb6
-rw-r--r--spec/services/projects/prometheus/alerts/notify_service_spec.rb9
-rw-r--r--spec/services/projects/transfer_service_spec.rb43
-rw-r--r--spec/services/projects/update_service_spec.rb12
-rw-r--r--spec/services/projects/update_statistics_service_spec.rb2
-rw-r--r--spec/services/protected_branches/cache_service_spec.rb113
-rw-r--r--spec/services/protected_branches/create_service_spec.rb29
-rw-r--r--spec/services/protected_branches/destroy_service_spec.rb27
-rw-r--r--spec/services/protected_branches/update_service_spec.rb33
-rw-r--r--spec/services/quick_actions/interpret_service_spec.rb110
-rw-r--r--spec/services/releases/create_service_spec.rb8
-rw-r--r--spec/services/releases/destroy_service_spec.rb2
-rw-r--r--spec/services/releases/update_service_spec.rb2
-rw-r--r--spec/services/resource_access_tokens/create_service_spec.rb2
-rw-r--r--spec/services/resource_events/change_labels_service_spec.rb60
-rw-r--r--spec/services/search/group_service_spec.rb2
-rw-r--r--spec/services/security/ci_configuration/sast_parser_service_spec.rb1
-rw-r--r--spec/services/snippets/update_service_spec.rb2
-rw-r--r--spec/services/suggestions/apply_service_spec.rb2
-rw-r--r--spec/services/system_note_service_spec.rb86
-rw-r--r--spec/services/system_notes/issuables_service_spec.rb108
-rw-r--r--spec/services/system_notes/merge_requests_service_spec.rb4
-rw-r--r--spec/services/system_notes/time_tracking_service_spec.rb174
-rw-r--r--spec/services/terraform/remote_state_handler_spec.rb2
-rw-r--r--spec/services/timelogs/create_service_spec.rb47
-rw-r--r--spec/services/timelogs/delete_service_spec.rb14
-rw-r--r--spec/services/todo_service_spec.rb15
-rw-r--r--spec/services/todos/destroy/design_service_spec.rb8
-rw-r--r--spec/services/todos/destroy/destroyed_issuable_service_spec.rb53
-rw-r--r--spec/services/topics/merge_service_spec.rb60
-rw-r--r--spec/services/uploads/destroy_service_spec.rb103
-rw-r--r--spec/services/users/create_service_spec.rb15
-rw-r--r--spec/services/users/dismiss_namespace_callout_service_spec.rb24
-rw-r--r--spec/services/users/dismiss_project_callout_service_spec.rb25
-rw-r--r--spec/services/users/update_service_spec.rb2
-rw-r--r--spec/services/web_hook_service_spec.rb4
-rw-r--r--spec/services/web_hooks/destroy_service_spec.rb67
-rw-r--r--spec/services/web_hooks/log_execution_service_spec.rb61
-rw-r--r--spec/services/webauthn/authenticate_service_spec.rb25
-rw-r--r--spec/services/work_items/create_and_link_service_spec.rb40
-rw-r--r--spec/services/work_items/create_from_task_service_spec.rb2
-rw-r--r--spec/services/work_items/create_service_spec.rb10
-rw-r--r--spec/services/work_items/parent_links/create_service_spec.rb28
-rw-r--r--spec/services/work_items/parent_links/destroy_service_spec.rb11
-rw-r--r--spec/services/work_items/update_service_spec.rb119
-rw-r--r--spec/services/work_items/widgets/assignees_service/update_service_spec.rb116
-rw-r--r--spec/services/work_items/widgets/description_service/update_service_spec.rb94
-rw-r--r--spec/services/work_items/widgets/hierarchy_service/update_service_spec.rb26
-rw-r--r--spec/services/work_items/widgets/start_and_due_date_service/update_service_spec.rb62
-rw-r--r--spec/services/work_items/widgets/weight_service/update_service_spec.rb36
-rw-r--r--spec/spec_helper.rb27
-rw-r--r--spec/support/database/cross-join-allowlist.yml7
-rw-r--r--spec/support/database/gitlab_schemas_validate_connection.rb17
-rw-r--r--spec/support/database/multiple_databases.rb20
-rw-r--r--spec/support/finder_collection_allowlist.yml2
-rw-r--r--spec/support/helpers/api_helpers.rb13
-rw-r--r--spec/support/helpers/ci/template_helpers.rb4
-rw-r--r--spec/support/helpers/cycle_analytics_helpers.rb25
-rw-r--r--spec/support/helpers/dns_helpers.rb25
-rw-r--r--spec/support/helpers/features/blob_spec_helpers.rb8
-rw-r--r--spec/support/helpers/features/invite_members_modal_helper.rb43
-rw-r--r--spec/support/helpers/features/runners_helpers.rb (renamed from spec/support/helpers/features/runner_helpers.rb)0
-rw-r--r--spec/support/helpers/features/source_editor_spec_helpers.rb5
-rw-r--r--spec/support/helpers/gitaly_setup.rb14
-rw-r--r--spec/support/helpers/global_id_deprecation_helpers.rb8
-rw-r--r--spec/support/helpers/graphql_helpers.rb8
-rw-r--r--spec/support/helpers/javascript_fixtures_helpers.rb21
-rw-r--r--spec/support/helpers/lfs_http_helpers.rb4
-rw-r--r--spec/support/helpers/login_helpers.rb4
-rw-r--r--spec/support/helpers/query_recorder.rb2
-rw-r--r--spec/support/helpers/rack_attack_spec_helpers.rb6
-rw-r--r--spec/support/helpers/redis_commands/recorder.rb34
-rw-r--r--spec/support/helpers/runner_releases_helper.rb22
-rw-r--r--spec/support/helpers/stub_member.rb8
-rw-r--r--spec/support/helpers/stub_method_calls.rb2
-rw-r--r--spec/support/helpers/stubbed_member.rb28
-rw-r--r--spec/support/helpers/type_name_deprecation_helpers.rb15
-rw-r--r--spec/support/matchers/event_store.rb4
-rw-r--r--spec/support/matchers/markdown_matchers.rb4
-rw-r--r--spec/support/shared_contexts/bulk_imports_requests_shared_context.rb14
-rw-r--r--spec/support/shared_contexts/features/integrations/integrations_shared_context.rb2
-rw-r--r--spec/support/shared_contexts/fixtures/analytics_shared_context.rb1
-rw-r--r--spec/support/shared_contexts/lib/gitlab/sidekiq_middleware/server_metrics_shared_context.rb8
-rw-r--r--spec/support/shared_contexts/markdown_snapshot_shared_examples.rb5
-rw-r--r--spec/support/shared_contexts/policies/group_policy_shared_context.rb1
-rw-r--r--spec/support/shared_contexts/policies/project_policy_shared_context.rb1
-rw-r--r--spec/support/shared_contexts/policies/project_policy_table_shared_context.rb57
-rw-r--r--spec/support/shared_contexts/upload_type_check_shared_context.rb14
-rw-r--r--spec/support/shared_examples/attention_request_cache_invalidation_examples.rb15
-rw-r--r--spec/support/shared_examples/boards/destroy_service_shared_examples.rb6
-rw-r--r--spec/support/shared_examples/components/pajamas_shared_examples.rb10
-rw-r--r--spec/support/shared_examples/controllers/search_cross_project_authorization_shared_examples.rb20
-rw-r--r--spec/support/shared_examples/controllers/search_external_authorization_service_shared_examples.rb24
-rw-r--r--spec/support/shared_examples/controllers/snowplow_event_tracking_examples.rb21
-rw-r--r--spec/support/shared_examples/controllers/uploads_actions_shared_examples.rb80
-rw-r--r--spec/support/shared_examples/features/access_tokens_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/features/content_editor_shared_examples.rb5
-rw-r--r--spec/support/shared_examples/features/inviting_members_shared_examples.rb110
-rw-r--r--spec/support/shared_examples/features/multiple_assignees_widget_mr_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/features/sidebar/sidebar_due_date_shared_examples.rb4
-rw-r--r--spec/support/shared_examples/features/trial_email_validation_shared_example.rb59
-rw-r--r--spec/support/shared_examples/features/user_views_tag_shared_examples.rb34
-rw-r--r--spec/support/shared_examples/features/variable_list_shared_examples.rb10
-rw-r--r--spec/support/shared_examples/features/wiki/file_attachments_shared_examples.rb10
-rw-r--r--spec/support/shared_examples/features/wiki/user_updates_wiki_page_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/graphql/mutations/timelogs/create_shared_examples.rb97
-rw-r--r--spec/support/shared_examples/graphql/mutations/work_items/update_weight_widget_shared_examples.rb34
-rw-r--r--spec/support/shared_examples/graphql/notes_creation_shared_examples.rb1
-rw-r--r--spec/support/shared_examples/graphql/types/gitlab_style_deprecations_shared_examples.rb17
-rw-r--r--spec/support/shared_examples/helpers/wiki_helpers_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/lib/gitlab/config/inheritable_shared_examples.rb5
-rw-r--r--spec/support/shared_examples/lib/gitlab/usage_data_counters/issuable_activity_shared_examples.rb41
-rw-r--r--spec/support/shared_examples/models/chat_integration_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/models/ci/metadata_id_tokens_shared_examples.rb44
-rw-r--r--spec/support/shared_examples/models/concerns/counter_attribute_shared_examples.rb64
-rw-r--r--spec/support/shared_examples/models/concerns/integrations/slack_mattermost_notifier_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/models/integrations/has_web_hook_shared_examples.rb4
-rw-r--r--spec/support/shared_examples/models/issuable_link_shared_examples.rb1
-rw-r--r--spec/support/shared_examples/models/member_shared_examples.rb19
-rw-r--r--spec/support/shared_examples/models/project_shared_examples.rb35
-rw-r--r--spec/support/shared_examples/models/taskable_shared_examples.rb24
-rw-r--r--spec/support/shared_examples/namespaces/traversal_scope_examples.rb16
-rw-r--r--spec/support/shared_examples/policies/group_project_namespace_policy_shared_examples.rb35
-rw-r--r--spec/support/shared_examples/requests/api/discussions_shared_examples.rb4
-rw-r--r--spec/support/shared_examples/requests/api/notes_shared_examples.rb27
-rw-r--r--spec/support/shared_examples/requests/api/npm_packages_shared_examples.rb8
-rw-r--r--spec/support/shared_examples/services/alert_management/alert_processing/alert_firing_shared_examples.rb47
-rw-r--r--spec/support/shared_examples/services/alert_management/alert_processing/alert_recovery_shared_examples.rb35
-rw-r--r--spec/support/shared_examples/services/alert_management/alert_processing/incident_creation_shared_examples.rb4
-rw-r--r--spec/support/shared_examples/services/alert_management/alert_processing/incident_resolution_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/services/alert_management/alert_processing/notifications_shared_examples.rb4
-rw-r--r--spec/support/shared_examples/services/boards/lists_move_service_shared_examples.rb120
-rw-r--r--spec/support/shared_examples/services/issuable_shared_examples.rb6
-rw-r--r--spec/support/shared_examples/services/packages_shared_examples.rb20
-rw-r--r--spec/support/shared_examples/services/snowplow_tracking_shared_examples.rb11
-rw-r--r--spec/support/shared_examples/services/timelogs/create_service_shared_examples.rb89
-rw-r--r--spec/support/shared_examples/services/work_items/create_task_shared_examples.rb16
-rw-r--r--spec/support/shared_examples/workers/batched_background_migration_worker_shared_examples.rb31
-rw-r--r--spec/support_specs/database/prevent_cross_joins_spec.rb2
-rw-r--r--spec/support_specs/helpers/redis_commands/recorder_spec.rb128
-rw-r--r--spec/tasks/dev_rake_spec.rb25
-rw-r--r--spec/tasks/gitlab/background_migrations_rake_spec.rb4
-rw-r--r--spec/tasks/gitlab/backup_rake_spec.rb13
-rw-r--r--spec/tasks/gitlab/db/decomposition/rollback/bump_ci_sequences_rake_spec.rb3
-rw-r--r--spec/tasks/gitlab/db/lock_writes_rake_spec.rb80
-rw-r--r--spec/tasks/gitlab/db/validate_config_rake_spec.rb2
-rw-r--r--spec/tasks/gitlab/db_rake_spec.rb2
-rw-r--r--spec/tasks/gitlab/gitaly_rake_spec.rb8
-rw-r--r--spec/tasks/gitlab/password_rake_spec.rb9
-rw-r--r--spec/tasks/gitlab/web_hook_rake_spec.rb4
-rw-r--r--spec/tooling/danger/customer_success_spec.rb91
-rw-r--r--spec/tooling/graphql/docs/renderer_spec.rb122
-rw-r--r--spec/tooling/lib/tooling/find_codeowners_spec.rb43
-rw-r--r--spec/tooling/quality/test_level_spec.rb4
-rw-r--r--spec/uploaders/avatar_uploader_spec.rb2
-rw-r--r--spec/uploaders/design_management/design_v432x230_uploader_spec.rb2
-rw-r--r--spec/uploaders/favicon_uploader_spec.rb8
-rw-r--r--spec/uploaders/object_storage_spec.rb18
-rw-r--r--spec/views/admin/identities/index.html.haml_spec.rb70
-rw-r--r--spec/views/devise/sessions/new.html.haml_spec.rb94
-rw-r--r--spec/views/groups/group_members/index.html.haml_spec.rb2
-rw-r--r--spec/views/layouts/header/_new_dropdown.haml_spec.rb5
-rw-r--r--spec/views/projects/blob/_viewer.html.haml_spec.rb1
-rw-r--r--spec/views/projects/pages/new.html.haml_spec.rb42
-rw-r--r--spec/views/projects/pipeline_schedules/_pipeline_schedule.html.haml_spec.rb4
-rw-r--r--spec/workers/archive_trace_worker_spec.rb31
-rw-r--r--spec/workers/build_finished_worker_spec.rb88
-rw-r--r--spec/workers/build_hooks_worker_spec.rb4
-rw-r--r--spec/workers/ci/build_finished_worker_spec.rb21
-rw-r--r--spec/workers/ci/cancel_pipeline_worker_spec.rb50
-rw-r--r--spec/workers/ci/runners/process_runner_version_update_worker_spec.rb48
-rw-r--r--spec/workers/ci/runners/reconcile_existing_runner_versions_cron_worker_spec.rb45
-rw-r--r--spec/workers/ci/track_failed_build_worker_spec.rb28
-rw-r--r--spec/workers/concerns/limited_capacity/job_tracker_spec.rb2
-rw-r--r--spec/workers/concerns/waitable_worker_spec.rb40
-rw-r--r--spec/workers/database/batched_background_migration/ci_database_worker_spec.rb2
-rw-r--r--spec/workers/database/batched_background_migration_worker_spec.rb2
-rw-r--r--spec/workers/every_sidekiq_worker_spec.rb1
-rw-r--r--spec/workers/gitlab/github_import/stage/import_issue_events_worker_spec.rb53
-rw-r--r--spec/workers/merge_requests/create_approval_event_worker_spec.rb51
-rw-r--r--spec/workers/merge_requests/create_approval_note_worker_spec.rb52
-rw-r--r--spec/workers/merge_requests/execute_approval_hooks_worker_spec.rb57
-rw-r--r--spec/workers/merge_requests/resolve_todos_after_approval_worker_spec.rb56
-rw-r--r--spec/workers/new_issue_worker_spec.rb10
-rw-r--r--spec/workers/packages/cleanup/execute_policy_worker_spec.rb2
-rw-r--r--spec/workers/pages/invalidate_domain_cache_worker_spec.rb112
-rw-r--r--spec/workers/post_receive_spec.rb6
-rw-r--r--spec/workers/project_cache_worker_spec.rb2
-rw-r--r--spec/workers/projects/import_export/relation_export_worker_spec.rb36
-rw-r--r--spec/workers/remove_unreferenced_lfs_objects_worker_spec.rb8
-rw-r--r--spec/workers/update_project_statistics_worker_spec.rb28
-rw-r--r--spec/workers/users/deactivate_dormant_users_worker_spec.rb21
1973 files changed, 39133 insertions, 17069 deletions
diff --git a/spec/channels/application_cable/connection_spec.rb b/spec/channels/application_cable/connection_spec.rb
index f5b2cdd2fca..4943669bde0 100644
--- a/spec/channels/application_cable/connection_spec.rb
+++ b/spec/channels/application_cable/connection_spec.rb
@@ -21,7 +21,7 @@ RSpec.describe ApplicationCable::Connection, :clean_gitlab_redis_sessions do
end
context 'with a stale password' do
- let(:partial_password_hash) { build(:user, password: 'some_old_password').authenticatable_salt }
+ let(:partial_password_hash) { build(:user, password: User.random_password).authenticatable_salt }
let(:session_hash) { { 'warden.user.user.key' => [[user.id], partial_password_hash] } }
it 'sets current_user to nil' do
diff --git a/spec/channels/awareness_channel_spec.rb b/spec/channels/awareness_channel_spec.rb
index 8d6dc36f6bd..47b1cd0188f 100644
--- a/spec/channels/awareness_channel_spec.rb
+++ b/spec/channels/awareness_channel_spec.rb
@@ -36,6 +36,7 @@ RSpec.describe AwarenessChannel, :clean_gitlab_redis_shared_state, type: :channe
collaborator = {
id: user.id,
name: user.name,
+ username: user.username,
avatar_url: user.avatar_url(size: 36),
last_activity: Time.zone.now,
last_activity_humanized: ActionController::Base.helpers.distance_of_time_in_words(
@@ -63,7 +64,7 @@ RSpec.describe AwarenessChannel, :clean_gitlab_redis_shared_state, type: :channe
session = AwarenessSession.for("/test")
expect { subscription.unsubscribe_from_channel }
- .to change { session.size}.by(-1)
+ .to change { session.size }.by(-1)
end
end
end
diff --git a/spec/commands/sidekiq_cluster/cli_spec.rb b/spec/commands/sidekiq_cluster/cli_spec.rb
index 55e8ab7885e..4d1a07a6a75 100644
--- a/spec/commands/sidekiq_cluster/cli_spec.rb
+++ b/spec/commands/sidekiq_cluster/cli_spec.rb
@@ -245,9 +245,9 @@ RSpec.describe Gitlab::SidekiqCluster::CLI, stub_settings_source: true do # rubo
it 'expands multiple queue groups correctly' do
expected_workers =
if Gitlab.ee?
- [%w[chat_notification], %w[project_export project_template_export]]
+ [%w[chat_notification], %w[project_export projects_import_export_relation_export project_template_export]]
else
- [%w[chat_notification], %w[project_export]]
+ [%w[chat_notification], %w[project_export projects_import_export_relation_export]]
end
expect(Gitlab::SidekiqCluster)
diff --git a/spec/components/diffs/overflow_warning_component_spec.rb b/spec/components/diffs/overflow_warning_component_spec.rb
index ee4014ee492..88c5de32de7 100644
--- a/spec/components/diffs/overflow_warning_component_spec.rb
+++ b/spec/components/diffs/overflow_warning_component_spec.rb
@@ -30,7 +30,7 @@ RSpec.describe Diffs::OverflowWarningComponent, type: :component do
end
describe "rendered component" do
- subject { rendered_component }
+ subject { rendered_content }
context "on a commit page" do
before do
diff --git a/spec/components/diffs/stats_component_spec.rb b/spec/components/diffs/stats_component_spec.rb
index 2e5a5f2ca26..be55c23d040 100644
--- a/spec/components/diffs/stats_component_spec.rb
+++ b/spec/components/diffs/stats_component_spec.rb
@@ -19,7 +19,7 @@ RSpec.describe Diffs::StatsComponent, type: :component do
let_it_be(:diff_files) { [diff_file] }
describe "rendered component" do
- subject { rendered_component }
+ subject { page }
let(:element) { page.find(".js-diff-stats-dropdown") }
diff --git a/spec/components/docs/01_overview.html.erb b/spec/components/docs/01_overview.html.erb
new file mode 100644
index 00000000000..da4178ebcb5
--- /dev/null
+++ b/spec/components/docs/01_overview.html.erb
@@ -0,0 +1,20 @@
+---
+title: Welcome to our Lookbook 👋
+---
+
+<p>With Lookbook we can navigate, inspect and interact with our ViewComponent previews.</p>
+
+<h2>Usage</h2>
+
+<ul>
+ <li>Use the sidebar on the left to navigate our component previews.</li>
+ <li>Many previews can be interacted with by making changes in the <em>Params</em> tab.</li>
+ <li>Some previews have additional usage instructions in their <em>Notes</em> tab.</li>
+</ul>
+
+<h2>Learn more</h2>
+
+<ul>
+ <li>Learn all about <a href="https://viewcomponent.org/">ViewComponent</a> and <a href="https://github.com/allmarkedup/lookbook">Lookbook</a>.</li>
+ <li>Have a look at our ViewComponent page in the <a href="https://docs.gitlab.com/ee/development/fe_guide/view_component.html">Frontend development docs</a>.</li>
+</ul>
diff --git a/spec/components/pajamas/avatar_component_spec.rb b/spec/components/pajamas/avatar_component_spec.rb
new file mode 100644
index 00000000000..3b4e4e49fc2
--- /dev/null
+++ b/spec/components/pajamas/avatar_component_spec.rb
@@ -0,0 +1,135 @@
+# frozen_string_literal: true
+require "spec_helper"
+
+RSpec.describe Pajamas::AvatarComponent, type: :component do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project) }
+ let_it_be(:group) { create(:group) }
+
+ let(:options) { {} }
+
+ before do
+ render_inline(described_class.new(record, **options))
+ end
+
+ describe "avatar shape" do
+ context "for a User" do
+ let(:record) { user }
+
+ it "has a circle shape" do
+ expect(page).to have_css ".gl-avatar.gl-avatar-circle"
+ end
+ end
+
+ context "for a Project" do
+ let(:record) { project }
+
+ it "has default shape (rect)" do
+ expect(page).to have_css ".gl-avatar"
+ expect(page).not_to have_css ".gl-avatar-circle"
+ end
+ end
+
+ context "for a Group" do
+ let(:record) { group }
+
+ it "has default shape (rect)" do
+ expect(page).to have_css ".gl-avatar"
+ expect(page).not_to have_css ".gl-avatar-circle"
+ end
+ end
+ end
+
+ describe "avatar image" do
+ context "when it has an uploaded image" do
+ let(:record) { project }
+
+ before do
+ allow(record).to receive(:avatar_url).and_return "/example.png"
+ render_inline(described_class.new(record, **options))
+ end
+
+ it "uses the avatar_url as image src" do
+ expect(page).to have_css "img.gl-avatar[src='/example.png?width=64']"
+ end
+
+ it "uses a srcset for higher resolution on retina displays" do
+ expect(page).to have_css "img.gl-avatar[srcset='/example.png?width=64 1x, /example.png?width=128 2x']"
+ end
+
+ it "uses lazy loading" do
+ expect(page).to have_css "img.gl-avatar[loading='lazy']"
+ end
+
+ context "with size option" do
+ let(:options) { { size: 16 } }
+
+ it "uses that size as param for image src and srcset" do
+ expect(page).to have_css(
+ "img.gl-avatar[src='/example.png?width=16'][srcset='/example.png?width=16 1x, /example.png?width=32 2x']"
+ )
+ end
+ end
+ end
+
+ context "when a project or group has no uploaded image" do
+ let(:record) { project }
+
+ it "uses an identicon with the record's initial" do
+ expect(page).to have_css "div.gl-avatar.gl-avatar-identicon", text: record.name[0].upcase
+ end
+
+ context "when the record has no id" do
+ let(:record) { build :group }
+
+ it "uses an identicon with default background color" do
+ expect(page).to have_css "div.gl-avatar.gl-avatar-identicon-bg1"
+ end
+ end
+ end
+
+ context "when a user has no uploaded image" do
+ let(:record) { user }
+
+ it "uses a gravatar" do
+ expect(rendered_component).to match /gravatar\.com/
+ end
+ end
+ end
+
+ describe "options" do
+ let(:record) { user }
+
+ describe "alt" do
+ context "with a value" do
+ let(:options) { { alt: "Profile picture" } }
+
+ it "uses given value as alt text" do
+ expect(page).to have_css ".gl-avatar[alt='Profile picture']"
+ end
+ end
+
+ context "without a value" do
+ it "uses the record's name as alt text" do
+ expect(page).to have_css ".gl-avatar[alt='#{record.name}']"
+ end
+ end
+ end
+
+ describe "class" do
+ let(:options) { { class: 'gl-m-4' } }
+
+ it 'has the correct custom class' do
+ expect(page).to have_css '.gl-avatar.gl-m-4'
+ end
+ end
+
+ describe "size" do
+ let(:options) { { size: 96 } }
+
+ it 'has the correct size class' do
+ expect(page).to have_css '.gl-avatar.gl-avatar-s96'
+ end
+ end
+ end
+end
diff --git a/spec/components/pajamas/banner_component_spec.rb b/spec/components/pajamas/banner_component_spec.rb
index 26468d80c77..861b10c3f69 100644
--- a/spec/components/pajamas/banner_component_spec.rb
+++ b/spec/components/pajamas/banner_component_spec.rb
@@ -7,7 +7,7 @@ RSpec.describe Pajamas::BannerComponent, type: :component do
end
let(:title) { "Banner title" }
- let(:content) { "Banner content"}
+ let(:content) { "Banner content" }
let(:options) { {} }
describe 'basic usage' do
diff --git a/spec/components/pajamas/button_component_spec.rb b/spec/components/pajamas/button_component_spec.rb
index a8c96042580..00423fd22a4 100644
--- a/spec/components/pajamas/button_component_spec.rb
+++ b/spec/components/pajamas/button_component_spec.rb
@@ -9,7 +9,7 @@ RSpec.describe Pajamas::ButtonComponent, type: :component do
let(:content) { "Button content" }
let(:options) { {} }
- describe 'basic usage' do
+ RSpec.shared_examples 'basic button behavior' do
before do
render_inline(subject) do |c|
content
@@ -59,7 +59,7 @@ RSpec.describe Pajamas::ButtonComponent, type: :component do
describe 'disabled' do
context 'by default (false)' do
it 'does not have disabled styling and behavior' do
- expect(page).not_to have_css ".disabled[disabled='disabled'][aria-disabled='true']"
+ expect(page).not_to have_css ".disabled[disabled][aria-disabled]"
end
end
@@ -67,7 +67,7 @@ RSpec.describe Pajamas::ButtonComponent, type: :component do
let(:options) { { disabled: true } }
it 'has disabled styling and behavior' do
- expect(page).to have_css ".disabled[disabled='disabled'][aria-disabled='true']"
+ expect(page).to have_css ".disabled[disabled][aria-disabled]"
end
end
end
@@ -75,7 +75,7 @@ RSpec.describe Pajamas::ButtonComponent, type: :component do
describe 'loading' do
context 'by default (false)' do
it 'is not disabled' do
- expect(page).not_to have_css ".disabled[disabled='disabled']"
+ expect(page).not_to have_css ".disabled[disabled]"
end
it 'does not render a spinner' do
@@ -87,7 +87,7 @@ RSpec.describe Pajamas::ButtonComponent, type: :component do
let(:options) { { loading: true } }
it 'is disabled' do
- expect(page).to have_css ".disabled[disabled='disabled']"
+ expect(page).to have_css ".disabled[disabled]"
end
it 'renders a spinner' do
@@ -218,9 +218,13 @@ RSpec.describe Pajamas::ButtonComponent, type: :component do
end
end
end
+ end
+
+ context 'button component renders a button' do
+ include_examples 'basic button behavior'
describe 'type' do
- context 'by default (without href)' do
+ context 'by default' do
it 'has type "button"' do
expect(page).to have_css "button[type='button']"
end
@@ -238,34 +242,42 @@ RSpec.describe Pajamas::ButtonComponent, type: :component do
end
end
- context 'when set to unkown type' do
+ context 'when set to unknown type' do
let(:options) { { type: :madeup } }
it 'has type "button"' do
expect(page).to have_css "button[type='button']"
end
end
+ end
+ end
- context 'for links (with href)' do
- let(:options) { { href: 'https://example.com', type: :reset } }
+ context 'button component renders a link' do
+ let(:options) { { href: 'https://gitlab.com', target: '_blank' } }
- it 'ignores type' do
- expect(page).not_to have_css "[type]"
- end
- end
+ it "renders a link instead of the button" do
+ expect(page).not_to have_css "button[type='button']"
+ expect(page).to have_css "a[href='https://gitlab.com'][target='_blank']"
end
- describe 'link button' do
- it 'renders a button tag with type="button" when "href" is not set' do
- expect(page).to have_css "button[type='button']"
+ include_examples 'basic button behavior'
+
+ describe 'type' do
+ let(:options) { { href: 'https://example.com', type: :reset } }
+
+ it 'ignores type' do
+ expect(page).not_to have_css "[type]"
end
+ end
+
+ describe 'method' do
+ where(:method) { [:get, :post, :put, :delete, :patch] }
- context 'when "href" is provided' do
- let(:options) { { href: 'https://gitlab.com', target: '_blank' } }
+ let(:options) { { href: 'https://gitlab.com', method: method } }
- it "renders a link instead of the button" do
- expect(page).not_to have_css "button[type='button']"
- expect(page).to have_css "a[href='https://gitlab.com'][target='_blank']"
+ with_them do
+ it 'has the correct data-method attribute' do
+ expect(page).to have_css "a[data-method='#{method}']"
end
end
end
diff --git a/spec/components/pajamas/checkbox_component_spec.rb b/spec/components/pajamas/checkbox_component_spec.rb
index d79c537a30e..3d50509ef10 100644
--- a/spec/components/pajamas/checkbox_component_spec.rb
+++ b/spec/components/pajamas/checkbox_component_spec.rb
@@ -8,12 +8,6 @@ RSpec.describe Pajamas::CheckboxComponent, :aggregate_failures, type: :component
let_it_be(:label) { "Show one file at a time on merge request's Changes tab" }
let_it_be(:help_text) { 'Instead of all the files changed, show only one file at a time.' }
- RSpec.shared_examples 'it renders unchecked checkbox with value of `1`' do
- it 'renders unchecked checkbox with value of `1`' do
- expect(page).to have_unchecked_field(label, with: '1')
- end
- end
-
context 'with default options' do
before do
fake_form_for do |form|
diff --git a/spec/components/pajamas/checkbox_tag_component_spec.rb b/spec/components/pajamas/checkbox_tag_component_spec.rb
new file mode 100644
index 00000000000..bca7a6005d5
--- /dev/null
+++ b/spec/components/pajamas/checkbox_tag_component_spec.rb
@@ -0,0 +1,59 @@
+# frozen_string_literal: true
+require "spec_helper"
+
+RSpec.describe Pajamas::CheckboxTagComponent, :aggregate_failures, type: :component do
+ let_it_be(:name) { :view_diffs_file_by_file }
+ let_it_be(:label) { "Show one file at a time on merge request's Changes tab" }
+ let_it_be(:help_text) { 'Instead of all the files changed, show only one file at a time.' }
+
+ context 'with default options' do
+ before do
+ render_inline(described_class.new(name: name)) do |c|
+ c.label { label }
+ end
+ end
+
+ include_examples 'it renders unchecked checkbox with value of `1`'
+ include_examples 'it does not render help text'
+ end
+
+ context 'with custom options' do
+ let_it_be(:value) { 'yes' }
+ let_it_be(:checkbox_options) { { class: 'checkbox-foo-bar', checked: true } }
+ let_it_be(:label_options) { { class: 'label-foo-bar' } }
+
+ before do
+ render_inline(
+ described_class.new(
+ name: name,
+ value: value,
+ checked: true,
+ checkbox_options: checkbox_options,
+ label_options: label_options
+ )
+ ) do |c|
+ c.label { label }
+ end
+ end
+
+ it 'renders checked checkbox with value of `yes`' do
+ expect(page).to have_checked_field(label, with: value, class: checkbox_options[:class])
+ end
+
+ it 'adds CSS class to label' do
+ expect(page).to have_selector('label.label-foo-bar')
+ end
+ end
+
+ context 'with `help_text` slot' do
+ before do
+ render_inline(described_class.new(name: name)) do |c|
+ c.label { label }
+ c.help_text { help_text }
+ end
+ end
+
+ include_examples 'it renders unchecked checkbox with value of `1`'
+ include_examples 'it renders help text'
+ end
+end
diff --git a/spec/components/pajamas/concerns/checkbox_radio_label_with_help_text_spec.rb b/spec/components/pajamas/concerns/checkbox_radio_label_with_help_text_spec.rb
index 7a792592b3c..4994abcfb93 100644
--- a/spec/components/pajamas/concerns/checkbox_radio_label_with_help_text_spec.rb
+++ b/spec/components/pajamas/concerns/checkbox_radio_label_with_help_text_spec.rb
@@ -8,6 +8,7 @@ RSpec.describe Pajamas::Concerns::CheckboxRadioLabelWithHelpText do
attr_reader(
:form,
:method,
+ :name,
:label_argument,
:help_text_argument,
:label_options,
@@ -16,8 +17,9 @@ RSpec.describe Pajamas::Concerns::CheckboxRadioLabelWithHelpText do
)
def initialize(
- form:,
- method:,
+ form: nil,
+ method: nil,
+ name: nil,
label: nil,
help_text: nil,
label_options: {},
@@ -26,6 +28,7 @@ RSpec.describe Pajamas::Concerns::CheckboxRadioLabelWithHelpText do
)
@form = form
@method = method
+ @name = name
@label_argument = label
@help_text_argument = help_text
@label_options = label_options
@@ -46,19 +49,25 @@ RSpec.describe Pajamas::Concerns::CheckboxRadioLabelWithHelpText do
end
include Pajamas::Concerns::CheckboxRadioLabelWithHelpText
+ include ActionView::Context
include ActionView::Helpers::TagHelper
+ include ActionView::Helpers::FormTagHelper
end
end
- let_it_be(:method) { 'username' }
+ let_it_be(:method_or_name) { 'username' }
let_it_be(:label_options) { { class: 'foo-bar' } }
let_it_be(:value) { 'Foo bar' }
+ let_it_be(:expected_label_entry) { '<span>Label argument</span>' }
+ let_it_be(:expected_label_with_help_text_entry) do
+ '<span>Label argument</span><p class="help-text" data-testid="pajamas-component-help-text">Help text argument</p>'
+ end
describe '#render_label_with_help_text' do
it 'calls `#format_options` with correct arguments' do
allow(form).to receive(:label)
- component = component_class.new(form: form, method: method, label_options: label_options, value: value)
+ component = component_class.new(form: form, method: method_or_name, label_options: label_options, value: value)
expect(component).to receive(:format_options).with(
options: label_options,
@@ -73,16 +82,13 @@ RSpec.describe Pajamas::Concerns::CheckboxRadioLabelWithHelpText do
it 'calls `form.label` with `label` and `help_text` arguments used in the block' do
component = component_class.new(
form: form,
- method: method,
+ method: method_or_name,
label: 'Label argument',
help_text: 'Help text argument'
)
- expected_label_entry = '<span>Label argument</span><p class="help-text"' \
- ' data-testid="pajamas-component-help-text">Help text argument</p>'
-
- expect(form).to receive(:label).with(method, {}) do |&block|
- expect(block.call).to eq(expected_label_entry)
+ expect(form).to receive(:label).with(method_or_name, {}) do |&block|
+ expect(block.call).to eq(expected_label_with_help_text_entry)
end
component.render_label_with_help_text
@@ -93,13 +99,11 @@ RSpec.describe Pajamas::Concerns::CheckboxRadioLabelWithHelpText do
it 'calls `form.label` with `label` argument used in the block' do
component = component_class.new(
form: form,
- method: method,
+ method: method_or_name,
label: 'Label argument'
)
- expected_label_entry = '<span>Label argument</span>'
-
- expect(form).to receive(:label).with(method, {}) do |&block|
+ expect(form).to receive(:label).with(method_or_name, {}) do |&block|
expect(block.call).to eq(expected_label_entry)
end
@@ -107,4 +111,49 @@ RSpec.describe Pajamas::Concerns::CheckboxRadioLabelWithHelpText do
end
end
end
+
+ describe '#render_label_tag_with_help_text' do
+ it 'calls `#format_options` with correct arguments' do
+ component = component_class.new(name: method_or_name, label_options: label_options, value: value)
+
+ expect(component).to receive(:format_options).with(
+ options: label_options,
+ css_classes: ['custom-control-label'],
+ additional_options: { value: value }
+ )
+
+ component.render_label_tag_with_help_text
+ end
+
+ context 'when `help_text` argument is passed' do
+ it 'calls `label_tag` with `label` and `help_text` arguments used in the block' do
+ component = component_class.new(
+ name: method_or_name,
+ label: 'Label argument',
+ help_text: 'Help text argument'
+ )
+
+ expect(component).to receive(:label_tag).with(method_or_name, {}) do |&block|
+ expect(block.call).to eq(expected_label_with_help_text_entry)
+ end
+
+ component.render_label_tag_with_help_text
+ end
+ end
+
+ context 'when `help_text` argument is not passed' do
+ it 'calls `label_tag` with `label` argument used in the block' do
+ component = component_class.new(
+ name: method_or_name,
+ label: 'Label argument'
+ )
+
+ expect(component).to receive(:label_tag).with(method_or_name, {}) do |&block|
+ expect(block.call).to eq(expected_label_entry)
+ end
+
+ component.render_label_tag_with_help_text
+ end
+ end
+ end
end
diff --git a/spec/components/previews/pajamas/alert_component_preview.rb b/spec/components/previews/pajamas/alert_component_preview.rb
new file mode 100644
index 00000000000..9a6b77715f5
--- /dev/null
+++ b/spec/components/previews/pajamas/alert_component_preview.rb
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+module Pajamas
+ class AlertComponentPreview < ViewComponent::Preview
+ # @param body text
+ # @param dismissible toggle
+ # @param variant select [info, warning, success, danger, tip]
+ def default(body: nil, dismissible: true, variant: :info)
+ render(Pajamas::AlertComponent.new(
+ title: "Title",
+ dismissible: dismissible,
+ variant: variant.to_sym
+ )) do |c|
+ if body
+ c.with_body { body }
+ end
+ end
+ end
+ end
+end
diff --git a/spec/components/previews/pajamas/avatar_component_preview.rb b/spec/components/previews/pajamas/avatar_component_preview.rb
new file mode 100644
index 00000000000..e5cdde1ccef
--- /dev/null
+++ b/spec/components/previews/pajamas/avatar_component_preview.rb
@@ -0,0 +1,27 @@
+# frozen_string_literal: true
+module Pajamas
+ class AvatarComponentPreview < ViewComponent::Preview
+ # Avatar
+ # ----
+ # See its design reference [here](https://design.gitlab.com/components/avatar).
+ def default
+ user
+ end
+
+ # We show user avatars in a circle.
+ # @param size select [16, 24, 32, 48, 64, 96]
+ def user(size: 64)
+ render(Pajamas::AvatarComponent.new(User.first, size: size))
+ end
+
+ # @param size select [16, 24, 32, 48, 64, 96]
+ def project(size: 64)
+ render(Pajamas::AvatarComponent.new(Project.first, size: size))
+ end
+
+ # @param size select [16, 24, 32, 48, 64, 96]
+ def group(size: 64)
+ render(Pajamas::AvatarComponent.new(Group.first, size: size))
+ end
+ end
+end
diff --git a/spec/components/previews/pajamas/banner_component_preview.rb b/spec/components/previews/pajamas/banner_component_preview.rb
new file mode 100644
index 00000000000..861e3ff95dc
--- /dev/null
+++ b/spec/components/previews/pajamas/banner_component_preview.rb
@@ -0,0 +1,54 @@
+# frozen_string_literal: true
+module Pajamas
+ class BannerComponentPreview < ViewComponent::Preview
+ # Banner
+ # ----
+ # See its design reference [here](https://design.gitlab.com/components/banner).
+ #
+ # @param button_text text
+ # @param button_link text
+ # @param content textarea
+ # @param embedded toggle
+ # @param variant select [introduction, promotion]
+ def default(
+ button_text: "Learn more",
+ button_link: "https://about.gitlab.com/",
+ content: "Add your message here.",
+ embedded: false,
+ variant: :promotion
+ )
+ render(Pajamas::BannerComponent.new(
+ button_text: button_text,
+ button_link: button_link,
+ embedded: embedded,
+ svg_path: "illustrations/autodevops.svg",
+ variant: variant
+ )) do |c|
+ content_tag :p, content
+ end
+ end
+
+ # Use the `primary_action` slot instead of `button_text` and `button_link` if you need something more special,
+ # like rendering a partial that holds your button.
+ def with_primary_action_slot
+ render(Pajamas::BannerComponent.new) do |c|
+ c.primary_action do
+ # You could also `render` another partial here.
+ tag.button "I'm special", class: "btn btn-md btn-confirm gl-button"
+ end
+ content_tag :p, "This banner uses the primary_action slot."
+ end
+ end
+
+    # Use the `illustration` slot instead of `svg_path` if your illustration is not part of the asset pipeline,
+    # but, for example, an inline SVG via `custom_icon`.
+ def with_illustration_slot
+ render(Pajamas::BannerComponent.new) do |c|
+ c.illustration do
+ '<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="white" stroke="white" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="feather feather-thumbs-up"><path d="M14 9V5a3 3 0 0 0-3-3l-4 9v11h11.28a2 2 0 0 0 2-1.7l1.38-9a2 2 0 0 0-2-2.3zM7 22H4a2 2 0 0 1-2-2v-7a2 2 0 0 1 2-2h3"></path></svg>'.html_safe # rubocop:disable Layout/LineLength
+ end
+ content_tag :p, "This banner uses the illustration slot."
+ end
+ end
+ end
+end
diff --git a/spec/components/previews/pajamas/button_component_preview.rb b/spec/components/previews/pajamas/button_component_preview.rb
new file mode 100644
index 00000000000..1f61d9cf2bc
--- /dev/null
+++ b/spec/components/previews/pajamas/button_component_preview.rb
@@ -0,0 +1,56 @@
+# frozen_string_literal: true
+module Pajamas
+ class ButtonComponentPreview < ViewComponent::Preview
+ # Button
+ # ----
+    # See its design reference [here](https://design.gitlab.com/components/button).
+ #
+ # @param category select [primary, secondary, tertiary]
+ # @param variant select [default, confirm, danger, dashed, link, reset]
+ # @param size select [small, medium]
+ # @param type select [button, reset, submit]
+ # @param disabled toggle
+ # @param loading toggle
+ # @param block toggle
+ # @param selected toggle
+ # @param icon text
+ # @param text text
+ def default( # rubocop:disable Metrics/ParameterLists
+ category: :primary,
+ variant: :default,
+ size: :medium,
+ type: :button,
+ disabled: false,
+ loading: false,
+ block: false,
+ selected: false,
+ icon: "pencil",
+ text: "Edit"
+ )
+ render(Pajamas::ButtonComponent.new(
+ category: category,
+ variant: variant,
+ size: size,
+ type: type,
+ disabled: disabled,
+ loading: loading,
+ block: block,
+ selected: selected,
+ icon: icon
+ )) do
+ text.presence
+ end
+ end
+
+ # The component can also be used to create links that look and feel like buttons.
+    # Just provide an `href` and optionally a `target` to create an `<a>` tag.
+ def link
+ render(Pajamas::ButtonComponent.new(
+ href: "https://gitlab.com",
+ target: "_blank"
+ )) do
+ "This is a link"
+ end
+ end
+ end
+end
diff --git a/spec/components/previews/pajamas/card_component_preview.rb b/spec/components/previews/pajamas/card_component_preview.rb
new file mode 100644
index 00000000000..61d1f8db9e1
--- /dev/null
+++ b/spec/components/previews/pajamas/card_component_preview.rb
@@ -0,0 +1,27 @@
+# frozen_string_literal: true
+module Pajamas
+ class CardComponentPreview < ViewComponent::Preview
+ # Card
+ # ----
+ # See its design reference [here](https://design.gitlab.com/components/card).
+ #
+ # @param header text
+ # @param body textarea
+ # @param footer text
+ def default(header: nil, body: "Every card has a body.", footer: nil)
+ render(Pajamas::CardComponent.new) do |c|
+ if header
+ c.with_header { header }
+ end
+
+ c.with_body do
+ content_tag(:p, body)
+ end
+
+ if footer
+ c.with_footer { footer }
+ end
+ end
+ end
+ end
+end
diff --git a/spec/components/previews/pajamas/spinner_component_preview.rb b/spec/components/previews/pajamas/spinner_component_preview.rb
new file mode 100644
index 00000000000..149bfddcfc2
--- /dev/null
+++ b/spec/components/previews/pajamas/spinner_component_preview.rb
@@ -0,0 +1,22 @@
+# frozen_string_literal: true
+module Pajamas
+ class SpinnerComponentPreview < ViewComponent::Preview
+ # Spinner
+ # ----
+ # See its design reference [here](https://design.gitlab.com/components/spinner).
+ #
+ # @param inline toggle
+ # @param label text
+ # @param size select [[small, sm], [medium, md], [large, lg], [extra large, xl]]
+ def default(inline: false, label: "Loading", size: :md)
+ render(Pajamas::SpinnerComponent.new(inline: inline, label: label, size: size))
+ end
+
+ # Use a light spinner on dark backgrounds
+ #
+ # @display bg_color "#222"
+ def light
+ render(Pajamas::SpinnerComponent.new(color: :light))
+ end
+ end
+end
diff --git a/spec/contracts/consumer/endpoints/project/pipelines.js b/spec/contracts/consumer/endpoints/project/pipelines.js
deleted file mode 100644
index 33758dee75b..00000000000
--- a/spec/contracts/consumer/endpoints/project/pipelines.js
+++ /dev/null
@@ -1,16 +0,0 @@
-import { request } from 'axios';
-
-export function getProjectPipelines(endpoint) {
- const { url } = endpoint;
-
- return request({
- method: 'GET',
- baseURL: url,
- url: '/gitlab-org/gitlab-qa/-/pipelines.json',
- headers: { Accept: '*/*' },
- params: {
- scope: 'all',
- page: 1,
- },
- }).then((response) => response.data);
-}
diff --git a/spec/contracts/consumer/fixtures/project/merge_request/diffs_batch.fixture.js b/spec/contracts/consumer/fixtures/project/merge_request/diffs_batch.fixture.js
index b53e4bb335d..673aad721b3 100644
--- a/spec/contracts/consumer/fixtures/project/merge_request/diffs_batch.fixture.js
+++ b/spec/contracts/consumer/fixtures/project/merge_request/diffs_batch.fixture.js
@@ -1,5 +1,3 @@
-/* eslint-disable @gitlab/require-i18n-strings */
-
import { Matchers } from '@pact-foundation/pact';
const body = {
@@ -73,8 +71,12 @@ const DiffsBatch = {
body,
},
- request: {
+ scenario: {
+ state: 'a merge request with diffs exists',
uponReceiving: 'a request for diff lines',
+ },
+
+ request: {
withRequest: {
method: 'GET',
path: '/gitlab-org/gitlab-qa/-/merge_requests/1/diffs_batch.json',
@@ -87,5 +89,3 @@ const DiffsBatch = {
};
export { DiffsBatch };
-
-/* eslint-enable @gitlab/require-i18n-strings */
diff --git a/spec/contracts/consumer/fixtures/project/merge_request/diffs_metadata.fixture.js b/spec/contracts/consumer/fixtures/project/merge_request/diffs_metadata.fixture.js
index 39dbcf78ee7..2fee4a02023 100644
--- a/spec/contracts/consumer/fixtures/project/merge_request/diffs_metadata.fixture.js
+++ b/spec/contracts/consumer/fixtures/project/merge_request/diffs_metadata.fixture.js
@@ -1,5 +1,3 @@
-/* eslint-disable @gitlab/require-i18n-strings */
-
import { Matchers } from '@pact-foundation/pact';
const body = {
@@ -81,8 +79,12 @@ const DiffsMetadata = {
body,
},
+ scenario: {
+ state: 'a merge request exists',
+ uponReceiving: 'a request for diffs metadata',
+ },
+
request: {
- uponReceiving: 'a request for Diffs Metadata',
withRequest: {
method: 'GET',
path: '/gitlab-org/gitlab-qa/-/merge_requests/1/diffs_metadata.json',
@@ -94,5 +96,3 @@ const DiffsMetadata = {
};
export { DiffsMetadata };
-
-/* eslint-enable @gitlab/require-i18n-strings */
diff --git a/spec/contracts/consumer/fixtures/project/merge_request/discussions.fixture.js b/spec/contracts/consumer/fixtures/project/merge_request/discussions.fixture.js
index af0962a01cb..8c392395e1c 100644
--- a/spec/contracts/consumer/fixtures/project/merge_request/discussions.fixture.js
+++ b/spec/contracts/consumer/fixtures/project/merge_request/discussions.fixture.js
@@ -1,5 +1,3 @@
-/* eslint-disable @gitlab/require-i18n-strings */
-
import { Matchers } from '@pact-foundation/pact';
const body = Matchers.eachLike({
@@ -70,8 +68,12 @@ const Discussions = {
body,
},
- request: {
+ scenario: {
+ state: 'a merge request with discussions exists',
uponReceiving: 'a request for discussions',
+ },
+
+ request: {
withRequest: {
method: 'GET',
path: '/gitlab-org/gitlab-qa/-/merge_requests/1/discussions.json',
@@ -83,5 +85,3 @@ const Discussions = {
};
export { Discussions };
-
-/* eslint-enable @gitlab/require-i18n-strings */
diff --git a/spec/contracts/consumer/fixtures/project/pipeline/create_a_new_pipeline.fixture.js b/spec/contracts/consumer/fixtures/project/pipeline/create_a_new_pipeline.fixture.js
new file mode 100644
index 00000000000..68063d2fb0c
--- /dev/null
+++ b/spec/contracts/consumer/fixtures/project/pipeline/create_a_new_pipeline.fixture.js
@@ -0,0 +1,39 @@
+import { Matchers } from '@pact-foundation/pact';
+import { REDIRECT_HTML } from '../../../helpers/common_regex_patterns';
+
+const body = Matchers.term({
+ matcher: REDIRECT_HTML,
+ generate:
+ '<html><body>You are being <a href="http://example.org/gitlab-org/gitlab-qa/-/pipelines/5">redirected</a>.</body></html>',
+});
+
+const NewProjectPipeline = {
+ success: {
+ status: 302,
+ headers: {
+ 'Content-Type': 'text/html; charset=utf-8',
+ },
+ body,
+ },
+
+ scenario: {
+ state: 'a project with a valid .gitlab-ci.yml configuration exists',
+ uponReceiving: 'a request to create a new pipeline',
+ },
+
+ request: {
+ withRequest: {
+ method: 'POST',
+ path: '/gitlab-org/gitlab-qa/-/pipelines',
+ headers: {
+ Accept: '*/*',
+ 'Content-Type': 'application/json; charset=utf-8',
+ },
+ body: {
+ ref: 'master',
+ },
+ },
+ },
+};
+
+export { NewProjectPipeline };
diff --git a/spec/contracts/consumer/fixtures/project/pipeline/delete_pipeline.fixture.js b/spec/contracts/consumer/fixtures/project/pipeline/delete_pipeline.fixture.js
new file mode 100644
index 00000000000..2e3e7355b99
--- /dev/null
+++ b/spec/contracts/consumer/fixtures/project/pipeline/delete_pipeline.fixture.js
@@ -0,0 +1,24 @@
+const DeletePipeline = {
+ success: {
+ status: 200,
+ headers: {
+ 'Content-Type': 'application/json; charset=utf-8',
+ },
+ },
+
+ scenario: {
+ state: 'a pipeline for a project exists',
+ uponReceiving: 'a request to delete the pipeline',
+ },
+
+ request: {
+ method: 'POST',
+ path: '/api/graphql',
+ },
+
+ variables: {
+ id: 'gid://gitlab/Ci::Pipeline/316112',
+ },
+};
+
+export { DeletePipeline };
diff --git a/spec/contracts/consumer/fixtures/project/pipeline/get_list_project_pipelines.fixture.js b/spec/contracts/consumer/fixtures/project/pipeline/get_list_project_pipelines.fixture.js
index 8a7663325b9..a982e927572 100644
--- a/spec/contracts/consumer/fixtures/project/pipeline/get_list_project_pipelines.fixture.js
+++ b/spec/contracts/consumer/fixtures/project/pipeline/get_list_project_pipelines.fixture.js
@@ -1,5 +1,3 @@
-/* eslint-disable @gitlab/require-i18n-strings */
-
import { Matchers } from '@pact-foundation/pact';
import {
URL,
@@ -225,8 +223,12 @@ const ProjectPipelines = {
body,
},
- request: {
+ scenario: {
+ state: 'a few pipelines for a project exists',
uponReceiving: 'a request for a list of project pipelines',
+ },
+
+ request: {
withRequest: {
method: 'GET',
path: '/gitlab-org/gitlab-qa/-/pipelines.json',
@@ -239,5 +241,3 @@ const ProjectPipelines = {
};
export { ProjectPipelines };
-
-/* eslint-enable @gitlab/require-i18n-strings */
diff --git a/spec/contracts/consumer/fixtures/project/pipeline/get_pipeline_header_data.fixture.js b/spec/contracts/consumer/fixtures/project/pipeline/get_pipeline_header_data.fixture.js
index f51ed9c2c74..b14a230d2e0 100644
--- a/spec/contracts/consumer/fixtures/project/pipeline/get_pipeline_header_data.fixture.js
+++ b/spec/contracts/consumer/fixtures/project/pipeline/get_pipeline_header_data.fixture.js
@@ -1,5 +1,3 @@
-/* eslint-disable @gitlab/require-i18n-strings */
-
import { Matchers } from '@pact-foundation/pact';
import {
JOB_STATUSES,
@@ -83,6 +81,11 @@ const PipelineHeaderData = {
body,
},
+ scenario: {
+ state: 'a pipeline for a project exists',
+ uponReceiving: 'a request for the pipeline header data',
+ },
+
request: {
method: 'POST',
path: '/api/graphql',
@@ -95,5 +98,3 @@ const PipelineHeaderData = {
};
export { PipelineHeaderData };
-
-/* eslint-enable @gitlab/require-i18n-strings */
diff --git a/spec/contracts/consumer/fixtures/project/pipeline_schedule/update_pipeline_schedule.fixture.js b/spec/contracts/consumer/fixtures/project/pipeline_schedule/update_pipeline_schedule.fixture.js
new file mode 100644
index 00000000000..acfab14851a
--- /dev/null
+++ b/spec/contracts/consumer/fixtures/project/pipeline_schedule/update_pipeline_schedule.fixture.js
@@ -0,0 +1,44 @@
+import { Matchers } from '@pact-foundation/pact';
+import { REDIRECT_HTML } from '../../../helpers/common_regex_patterns';
+
+const body = Matchers.term({
+ matcher: REDIRECT_HTML,
+ generate:
+ '<html><body>You are being <a href="http://example.org/gitlab-org/gitlab-qa/-/pipelines/5">redirected</a>.</body></html>',
+});
+
+const UpdatePipelineSchedule = {
+ success: {
+ status: 302,
+ headers: {
+ 'Content-Type': 'text/html; charset=utf-8',
+ },
+ body,
+ },
+
+ scenario: {
+ state: 'a project with a pipeline schedule exists',
+ uponReceiving: 'a request to edit a pipeline schedule',
+ },
+
+ request: {
+ withRequest: {
+ method: 'PUT',
+ path: '/gitlab-org/gitlab-qa/-/pipeline_schedules/25',
+ headers: {
+ Accept: '*/*',
+ 'Content-Type': 'application/json; charset=utf-8',
+ },
+ body: {
+ schedule: {
+ description: 'bar',
+ cron: '0 1 * * *',
+ cron_timezone: 'UTC',
+ active: true,
+ },
+ },
+ },
+ },
+};
+
+export { UpdatePipelineSchedule };
diff --git a/spec/contracts/consumer/helpers/common_regex_patterns.js b/spec/contracts/consumer/helpers/common_regex_patterns.js
index 664a71ab8a9..78dfeb7748f 100644
--- a/spec/contracts/consumer/helpers/common_regex_patterns.js
+++ b/spec/contracts/consumer/helpers/common_regex_patterns.js
@@ -3,6 +3,7 @@
*/
export const URL = '^(http|https)://[a-z0-9]+([-.]{1}[a-z0-9]+)*.[a-z]{2,5}(:[0-9]{1,5})?(/.*)?$';
export const URL_PATH = '^/[a-zA-Z0-9#-=?_]+$';
+export const REDIRECT_HTML = 'You are being <a href=\\"(.)+\\">redirected</a>.';
// Pipelines
export const PIPELINE_GROUPS =
diff --git a/spec/contracts/consumer/resources/api/pipeline_schedules.js b/spec/contracts/consumer/resources/api/pipeline_schedules.js
new file mode 100644
index 00000000000..ad04e59b9cd
--- /dev/null
+++ b/spec/contracts/consumer/resources/api/pipeline_schedules.js
@@ -0,0 +1,26 @@
+import axios from 'axios';
+
+export async function updatePipelineSchedule(endpoint) {
+ const { url } = endpoint;
+
+ return axios({
+ method: 'PUT',
+ baseURL: url,
+ url: '/gitlab-org/gitlab-qa/-/pipeline_schedules/25',
+ headers: {
+ Accept: '*/*',
+ 'Content-Type': 'application/json; charset=utf-8',
+ },
+ data: {
+ schedule: {
+ description: 'bar',
+ cron: '0 1 * * *',
+ cron_timezone: 'UTC',
+ active: true,
+ },
+ },
+ validateStatus: (status) => {
+ return status === 302;
+ },
+ });
+}
diff --git a/spec/contracts/consumer/endpoints/project/merge_requests.js b/spec/contracts/consumer/resources/api/project/merge_requests.js
index 38773e5fb10..e52743cede2 100644
--- a/spec/contracts/consumer/endpoints/project/merge_requests.js
+++ b/spec/contracts/consumer/resources/api/project/merge_requests.js
@@ -1,9 +1,9 @@
-import { request } from 'axios';
+import axios from 'axios';
-export function getDiffsMetadata(endpoint) {
+export async function getDiffsMetadata(endpoint) {
const { url } = endpoint;
- return request({
+ return axios({
method: 'GET',
baseURL: url,
url: '/gitlab-org/gitlab-qa/-/merge_requests/1/diffs_metadata.json',
@@ -11,10 +11,10 @@ export function getDiffsMetadata(endpoint) {
}).then((response) => response.data);
}
-export function getDiscussions(endpoint) {
+export async function getDiscussions(endpoint) {
const { url } = endpoint;
- return request({
+ return axios({
method: 'GET',
baseURL: url,
url: '/gitlab-org/gitlab-qa/-/merge_requests/1/discussions.json',
@@ -22,10 +22,10 @@ export function getDiscussions(endpoint) {
}).then((response) => response.data);
}
-export function getDiffsBatch(endpoint) {
+export async function getDiffsBatch(endpoint) {
const { url } = endpoint;
- return request({
+ return axios({
method: 'GET',
baseURL: url,
url: '/gitlab-org/gitlab-qa/-/merge_requests/1/diffs_batch.json?page=0',
diff --git a/spec/contracts/consumer/resources/api/project/pipelines.js b/spec/contracts/consumer/resources/api/project/pipelines.js
new file mode 100644
index 00000000000..8c6f5199666
--- /dev/null
+++ b/spec/contracts/consumer/resources/api/project/pipelines.js
@@ -0,0 +1,34 @@
+import axios from 'axios';
+
+export async function getProjectPipelines(endpoint) {
+ const { url } = endpoint;
+
+ return axios({
+ method: 'GET',
+ baseURL: url,
+ url: '/gitlab-org/gitlab-qa/-/pipelines.json',
+ headers: { Accept: '*/*' },
+ params: {
+ scope: 'all',
+ page: 1,
+ },
+ }).then((response) => response.data);
+}
+
+export async function postProjectPipelines(endpoint) {
+ const { url } = endpoint;
+
+ return axios({
+ method: 'POST',
+ baseURL: url,
+ url: '/gitlab-org/gitlab-qa/-/pipelines',
+ headers: {
+ Accept: '*/*',
+ 'Content-Type': 'application/json; charset=utf-8',
+ },
+ data: { ref: 'master' },
+ validateStatus: (status) => {
+ return status === 302;
+ },
+ });
+}
diff --git a/spec/contracts/consumer/resources/graphql/pipelines.js b/spec/contracts/consumer/resources/graphql/pipelines.js
index 4f7ce58891c..48724e15eb8 100644
--- a/spec/contracts/consumer/resources/graphql/pipelines.js
+++ b/spec/contracts/consumer/resources/graphql/pipelines.js
@@ -16,6 +16,27 @@ export async function getPipelineHeaderDataRequest(endpoint) {
};
return axios({
+ method: 'POST',
+ baseURL: url,
+ url: '/api/graphql',
+ headers: { Accept: '*/*' },
+ data: graphqlQuery,
+ });
+}
+
+export async function deletePipeline(endpoint) {
+ const { url } = endpoint;
+ const query = await extractGraphQLQuery(
+ 'app/assets/javascripts/pipelines/graphql/mutations/delete_pipeline.mutation.graphql',
+ );
+ const graphqlQuery = {
+ query,
+ variables: {
+ id: 'gid://gitlab/Ci::Pipeline/316112',
+ },
+ };
+
+ return axios({
baseURL: url,
url: '/api/graphql',
method: 'POST',
diff --git a/spec/contracts/consumer/specs/project/merge_request/show.spec.js b/spec/contracts/consumer/specs/project/merge_request/show.spec.js
index 8c6e029cb12..4183e19435a 100644
--- a/spec/contracts/consumer/specs/project/merge_request/show.spec.js
+++ b/spec/contracts/consumer/specs/project/merge_request/show.spec.js
@@ -1,5 +1,3 @@
-/* eslint-disable @gitlab/require-i18n-strings */
-
import { pactWith } from 'jest-pact';
import { DiffsBatch } from '../../../fixtures/project/merge_request/diffs_batch.fixture';
@@ -9,7 +7,7 @@ import {
getDiffsBatch,
getDiffsMetadata,
getDiscussions,
-} from '../../../endpoints/project/merge_requests';
+} from '../../../resources/api/project/merge_requests';
const CONSUMER_NAME = 'MergeRequest#show';
const CONSUMER_LOG = '../logs/consumer.log';
@@ -31,19 +29,19 @@ pactWith(
describe(DIFFS_BATCH_PROVIDER_NAME, () => {
beforeEach(() => {
const interaction = {
- state: 'a merge request with diffs exists',
+ ...DiffsBatch.scenario,
...DiffsBatch.request,
willRespondWith: DiffsBatch.success,
};
provider.addInteraction(interaction);
});
- it('returns a successful body', () => {
- return getDiffsBatch({
+ it('returns a successful body', async () => {
+ const diffsBatch = await getDiffsBatch({
url: provider.mockService.baseUrl,
- }).then((diffsBatch) => {
- expect(diffsBatch).toEqual(DiffsBatch.body);
});
+
+ expect(diffsBatch).toEqual(DiffsBatch.body);
});
});
},
@@ -61,19 +59,19 @@ pactWith(
describe(DISCUSSIONS_PROVIDER_NAME, () => {
beforeEach(() => {
const interaction = {
- state: 'a merge request with discussions exists',
+ ...Discussions.scenario,
...Discussions.request,
willRespondWith: Discussions.success,
};
provider.addInteraction(interaction);
});
- it('return a successful body', () => {
- return getDiscussions({
+ it('returns a successful body', async () => {
+ const discussions = await getDiscussions({
url: provider.mockService.baseUrl,
- }).then((discussions) => {
- expect(discussions).toEqual(Discussions.body);
});
+
+ expect(discussions).toEqual(Discussions.body);
});
});
},
@@ -91,22 +89,20 @@ pactWith(
describe(DIFFS_METADATA_PROVIDER_NAME, () => {
beforeEach(() => {
const interaction = {
- state: 'a merge request exists',
+ ...DiffsMetadata.scenario,
...DiffsMetadata.request,
willRespondWith: DiffsMetadata.success,
};
provider.addInteraction(interaction);
});
- it('return a successful body', () => {
- return getDiffsMetadata({
+ it('returns a successful body', async () => {
+ const diffsMetadata = await getDiffsMetadata({
url: provider.mockService.baseUrl,
- }).then((diffsMetadata) => {
- expect(diffsMetadata).toEqual(DiffsMetadata.body);
});
+
+ expect(diffsMetadata).toEqual(DiffsMetadata.body);
});
});
},
);
-
-/* eslint-enable @gitlab/require-i18n-strings */
diff --git a/spec/contracts/consumer/specs/project/pipeline/index.spec.js b/spec/contracts/consumer/specs/project/pipeline/index.spec.js
index 1c0358a3e28..1453435d637 100644
--- a/spec/contracts/consumer/specs/project/pipeline/index.spec.js
+++ b/spec/contracts/consumer/specs/project/pipeline/index.spec.js
@@ -1,9 +1,7 @@
-/* eslint-disable @gitlab/require-i18n-strings */
-
import { pactWith } from 'jest-pact';
import { ProjectPipelines } from '../../../fixtures/project/pipeline/get_list_project_pipelines.fixture';
-import { getProjectPipelines } from '../../../endpoints/project/pipelines';
+import { getProjectPipelines } from '../../../resources/api/project/pipelines';
const CONSUMER_NAME = 'Pipelines#index';
const CONSUMER_LOG = '../logs/consumer.log';
@@ -23,22 +21,20 @@ pactWith(
describe(PROVIDER_NAME, () => {
beforeEach(() => {
const interaction = {
- state: 'a few pipelines for a project exists',
+ ...ProjectPipelines.scenario,
...ProjectPipelines.request,
willRespondWith: ProjectPipelines.success,
};
provider.addInteraction(interaction);
});
- it('returns a successful body', () => {
- return getProjectPipelines({
+ it('returns a successful body', async () => {
+ const pipelines = await getProjectPipelines({
url: provider.mockService.baseUrl,
- }).then((pipelines) => {
- expect(pipelines).toEqual(ProjectPipelines.body);
});
+
+ expect(pipelines).toEqual(ProjectPipelines.body);
});
});
},
);
-
-/* eslint-enable @gitlab/require-i18n-strings */
diff --git a/spec/contracts/consumer/specs/project/pipeline/new.spec.js b/spec/contracts/consumer/specs/project/pipeline/new.spec.js
new file mode 100644
index 00000000000..c3824d5979e
--- /dev/null
+++ b/spec/contracts/consumer/specs/project/pipeline/new.spec.js
@@ -0,0 +1,41 @@
+import { pactWith } from 'jest-pact';
+
+import { NewProjectPipeline } from '../../../fixtures/project/pipeline/create_a_new_pipeline.fixture';
+import { postProjectPipelines } from '../../../resources/api/project/pipelines';
+
+const CONSUMER_NAME = 'Pipelines#new';
+const CONSUMER_LOG = '../logs/consumer.log';
+const CONTRACT_DIR = '../contracts/project/pipeline/new';
+const PROVIDER_NAME = 'POST Create a new pipeline';
+
+// API endpoint: /pipelines
+pactWith(
+ {
+ consumer: CONSUMER_NAME,
+ provider: PROVIDER_NAME,
+ log: CONSUMER_LOG,
+ dir: CONTRACT_DIR,
+ },
+
+ (provider) => {
+ describe(PROVIDER_NAME, () => {
+ beforeEach(async () => {
+ const interaction = {
+ ...NewProjectPipeline.scenario,
+ ...NewProjectPipeline.request,
+ willRespondWith: NewProjectPipeline.success,
+ };
+
+ provider.addInteraction(interaction);
+ });
+
+ it('returns a successful body', async () => {
+ const newPipeline = await postProjectPipelines({
+ url: provider.mockService.baseUrl,
+ });
+
+ expect(newPipeline.status).toEqual(NewProjectPipeline.success.status);
+ });
+ });
+ },
+);
diff --git a/spec/contracts/consumer/specs/project/pipeline/show.spec.js b/spec/contracts/consumer/specs/project/pipeline/show.spec.js
index 0f1cc1c3108..be6abb78eb5 100644
--- a/spec/contracts/consumer/specs/project/pipeline/show.spec.js
+++ b/spec/contracts/consumer/specs/project/pipeline/show.spec.js
@@ -1,36 +1,37 @@
-/* eslint-disable @gitlab/require-i18n-strings */
-
import { pactWith } from 'jest-pact';
import { GraphQLInteraction } from '@pact-foundation/pact';
import { extractGraphQLQuery } from '../../../helpers/graphql_query_extractor';
import { PipelineHeaderData } from '../../../fixtures/project/pipeline/get_pipeline_header_data.fixture';
-import { getPipelineHeaderDataRequest } from '../../../resources/graphql/pipelines';
+import { DeletePipeline } from '../../../fixtures/project/pipeline/delete_pipeline.fixture';
+
+import { getPipelineHeaderDataRequest, deletePipeline } from '../../../resources/graphql/pipelines';
const CONSUMER_NAME = 'Pipelines#show';
const CONSUMER_LOG = '../logs/consumer.log';
const CONTRACT_DIR = '../contracts/project/pipeline/show';
-const PROVIDER_NAME = 'GET pipeline header data';
+const GET_PIPELINE_HEADER_DATA_PROVIDER_NAME = 'GET pipeline header data';
+const DELETE_PIPELINE_PROVIDER_NAME = 'DELETE pipeline';
// GraphQL query: getPipelineHeaderData
pactWith(
{
consumer: CONSUMER_NAME,
- provider: PROVIDER_NAME,
+ provider: GET_PIPELINE_HEADER_DATA_PROVIDER_NAME,
log: CONSUMER_LOG,
dir: CONTRACT_DIR,
},
(provider) => {
- describe(PROVIDER_NAME, () => {
+ describe(GET_PIPELINE_HEADER_DATA_PROVIDER_NAME, () => {
beforeEach(async () => {
const query = await extractGraphQLQuery(
'app/assets/javascripts/pipelines/graphql/queries/get_pipeline_header_data.query.graphql',
);
const graphqlQuery = new GraphQLInteraction()
- .given('a pipeline for a project exists')
- .uponReceiving('a request for the pipeline header data')
+ .given(PipelineHeaderData.scenario.state)
+ .uponReceiving(PipelineHeaderData.scenario.uponReceiving)
.withQuery(query)
.withRequest(PipelineHeaderData.request)
.withVariables(PipelineHeaderData.variables)
@@ -50,4 +51,39 @@ pactWith(
},
);
-/* eslint-enable @gitlab/require-i18n-strings */
+// GraphQL mutation: deletePipeline
+pactWith(
+ {
+ consumer: CONSUMER_NAME,
+ provider: DELETE_PIPELINE_PROVIDER_NAME,
+ log: CONSUMER_LOG,
+ dir: CONTRACT_DIR,
+ },
+
+ (provider) => {
+ describe(DELETE_PIPELINE_PROVIDER_NAME, () => {
+ beforeEach(async () => {
+ const query = await extractGraphQLQuery(
+ 'app/assets/javascripts/pipelines/graphql/mutations/delete_pipeline.mutation.graphql',
+ );
+ const graphqlQuery = new GraphQLInteraction()
+ .given(DeletePipeline.scenario.state)
+ .uponReceiving(DeletePipeline.scenario.uponReceiving)
+ .withQuery(query)
+ .withRequest(DeletePipeline.request)
+ .withVariables(DeletePipeline.variables)
+ .willRespondWith(DeletePipeline.success);
+
+ provider.addInteraction(graphqlQuery);
+ });
+
+ it('returns a successful body', async () => {
+ const deletePipelineResponse = await deletePipeline({
+ url: provider.mockService.baseUrl,
+ });
+
+ expect(deletePipelineResponse.status).toEqual(DeletePipeline.success.status);
+ });
+ });
+ },
+);
diff --git a/spec/contracts/consumer/specs/project/pipeline_schedule/edit.spec.js b/spec/contracts/consumer/specs/project/pipeline_schedule/edit.spec.js
new file mode 100644
index 00000000000..117e6754255
--- /dev/null
+++ b/spec/contracts/consumer/specs/project/pipeline_schedule/edit.spec.js
@@ -0,0 +1,41 @@
+import { pactWith } from 'jest-pact';
+
+import { UpdatePipelineSchedule } from '../../../fixtures/project/pipeline_schedule/update_pipeline_schedule.fixture';
+import { updatePipelineSchedule } from '../../../resources/api/pipeline_schedules';
+
+const CONSUMER_NAME = 'PipelineSchedules#edit';
+const CONSUMER_LOG = '../logs/consumer.log';
+const CONTRACT_DIR = '../contracts/project/pipeline_schedule/edit';
+const PROVIDER_NAME = 'PUT Edit a pipeline schedule';
+
+// API endpoint: /pipeline_schedules/:id
+pactWith(
+ {
+ consumer: CONSUMER_NAME,
+ provider: PROVIDER_NAME,
+ log: CONSUMER_LOG,
+ dir: CONTRACT_DIR,
+ },
+
+ (provider) => {
+ describe(PROVIDER_NAME, () => {
+ beforeEach(() => {
+ const interaction = {
+ ...UpdatePipelineSchedule.scenario,
+ ...UpdatePipelineSchedule.request,
+ willRespondWith: UpdatePipelineSchedule.success,
+ };
+
+ provider.addInteraction(interaction);
+ });
+
+ it('returns a successful body', async () => {
+ const pipelineSchedule = await updatePipelineSchedule({
+ url: provider.mockService.baseUrl,
+ });
+
+ expect(pipelineSchedule.status).toEqual(UpdatePipelineSchedule.success.status);
+ });
+ });
+ },
+);
diff --git a/spec/contracts/contracts/project/merge_request/show/mergerequest#show-merge_request_diffs_metadata_endpoint.json b/spec/contracts/contracts/project/merge_request/show/mergerequest#show-merge_request_diffs_metadata_endpoint.json
index b98a0127e54..c59a3d55f43 100644
--- a/spec/contracts/contracts/project/merge_request/show/mergerequest#show-merge_request_diffs_metadata_endpoint.json
+++ b/spec/contracts/contracts/project/merge_request/show/mergerequest#show-merge_request_diffs_metadata_endpoint.json
@@ -7,7 +7,7 @@
},
"interactions": [
{
- "description": "a request for Diffs Metadata",
+ "description": "a request for diffs metadata",
"providerState": "a merge request exists",
"request": {
"method": "GET",
@@ -220,4 +220,4 @@
"version": "2.0.0"
}
}
-}
\ No newline at end of file
+}
diff --git a/spec/contracts/contracts/project/pipeline/new/pipelines#new-post_create_a_new_pipeline.json b/spec/contracts/contracts/project/pipeline/new/pipelines#new-post_create_a_new_pipeline.json
new file mode 100644
index 00000000000..4627f0cb0bf
--- /dev/null
+++ b/spec/contracts/contracts/project/pipeline/new/pipelines#new-post_create_a_new_pipeline.json
@@ -0,0 +1,43 @@
+{
+ "consumer": {
+ "name": "Pipelines#new"
+ },
+ "provider": {
+ "name": "POST Create a new pipeline"
+ },
+ "interactions": [
+ {
+ "description": "a request to create a new pipeline",
+ "providerState": "a project with a valid .gitlab-ci.yml configuration exists",
+ "request": {
+ "method": "POST",
+ "path": "/gitlab-org/gitlab-qa/-/pipelines",
+ "headers": {
+ "Accept": "*/*",
+ "Content-Type": "application/json; charset=utf-8"
+ },
+ "body": {
+ "ref": "master"
+ }
+ },
+ "response": {
+ "status": 302,
+ "headers": {
+ "Content-Type": "text/html; charset=utf-8"
+ },
+ "body": "<html><body>You are being <a href=\"http://example.org/gitlab-org/gitlab-qa/-/pipelines/5\">redirected</a>.</body></html>",
+ "matchingRules": {
+ "$.body": {
+ "match": "regex",
+ "regex": "You are being <a href=\\\"(.)+\\/pipelines\\/[0-9]+\\\">redirected<\\/a>."
+ }
+ }
+ }
+ }
+ ],
+ "metadata": {
+ "pactSpecification": {
+ "version": "2.0.0"
+ }
+ }
+}
\ No newline at end of file
diff --git a/spec/contracts/contracts/project/pipeline/show/pipelines#show-delete_pipeline.json b/spec/contracts/contracts/project/pipeline/show/pipelines#show-delete_pipeline.json
new file mode 100644
index 00000000000..795c8a6e197
--- /dev/null
+++ b/spec/contracts/contracts/project/pipeline/show/pipelines#show-delete_pipeline.json
@@ -0,0 +1,44 @@
+{
+ "consumer": {
+ "name": "Pipelines#show"
+ },
+ "provider": {
+ "name": "DELETE pipeline"
+ },
+ "interactions": [
+ {
+ "description": "a request to delete the pipeline",
+ "providerState": "a pipeline for a project exists",
+ "request": {
+ "method": "POST",
+ "path": "/api/graphql",
+ "headers": {
+ "content-type": "application/json"
+ },
+ "body": {
+ "query": "mutation deletePipeline($id: CiPipelineID!) {\n pipelineDestroy(input: { id: $id }) {\n errors\n }\n}\n",
+ "variables": {
+ "id": "gid://gitlab/Ci::Pipeline/316112"
+ }
+ },
+ "matchingRules": {
+ "$.body.query": {
+ "match": "regex",
+ "regex": "mutation\\s*deletePipeline\\(\\$id:\\s*CiPipelineID!\\)\\s*\\{\\s*pipelineDestroy\\(input:\\s*\\{\\s*id:\\s*\\$id\\s*\\}\\)\\s*\\{\\s*errors\\s*\\}\\s*\\}\\s*"
+ }
+ }
+ },
+ "response": {
+ "status": 200,
+ "headers": {
+ "Content-Type": "application/json; charset=utf-8"
+ }
+ }
+ }
+ ],
+ "metadata": {
+ "pactSpecification": {
+ "version": "2.0.0"
+ }
+ }
+}
\ No newline at end of file
diff --git a/spec/contracts/contracts/project/pipeline_schedule/edit/pipelineschedules#edit-put_edit_a_pipeline_schedule.json b/spec/contracts/contracts/project/pipeline_schedule/edit/pipelineschedules#edit-put_edit_a_pipeline_schedule.json
new file mode 100644
index 00000000000..e0dd68dc230
--- /dev/null
+++ b/spec/contracts/contracts/project/pipeline_schedule/edit/pipelineschedules#edit-put_edit_a_pipeline_schedule.json
@@ -0,0 +1,48 @@
+{
+ "consumer": {
+ "name": "PipelineSchedules#edit"
+ },
+ "provider": {
+ "name": "PUT Edit a pipeline schedule"
+ },
+ "interactions": [
+ {
+ "description": "a request to edit a pipeline schedule",
+ "providerState": "a project with a pipeline schedule exists",
+ "request": {
+ "method": "PUT",
+ "path": "/gitlab-org/gitlab-qa/-/pipeline_schedules/25",
+ "headers": {
+ "Accept": "*/*",
+ "Content-Type": "application/json; charset=utf-8"
+ },
+ "body": {
+ "schedule": {
+ "description": "bar",
+ "cron": "0 1 * * *",
+ "cron_timezone": "UTC",
+ "active": true
+ }
+ }
+ },
+ "response": {
+ "status": 302,
+ "headers": {
+ "Content-Type": "text/html; charset=utf-8"
+ },
+ "body": "<html><body>You are being <a href=\"http://example.org/gitlab-org/gitlab-qa/-/pipelines/5\">redirected</a>.</body></html>",
+ "matchingRules": {
+ "$.body": {
+ "match": "regex",
+ "regex": "You are being <a href=\\\"(.)+\\\">redirected<\\/a>."
+ }
+ }
+ }
+ }
+ ],
+ "metadata": {
+ "pactSpecification": {
+ "version": "2.0.0"
+ }
+ }
+}
\ No newline at end of file
diff --git a/spec/contracts/provider/pact_helpers/project/merge_request/diffs_batch_helper.rb b/spec/contracts/provider/pact_helpers/project/merge_request/show/diffs_batch_helper.rb
index 7d1fbe91e86..f94ce47b1f3 100644
--- a/spec/contracts/provider/pact_helpers/project/merge_request/diffs_batch_helper.rb
+++ b/spec/contracts/provider/pact_helpers/project/merge_request/show/diffs_batch_helper.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
-require_relative '../../../spec_helper'
-require_relative '../../../states/project/merge_request/diffs_batch_state'
+require_relative '../../../../spec_helper'
+require_relative '../../../../states/project/merge_request/show_state'
module Provider
module DiffsBatchHelper
diff --git a/spec/contracts/provider/pact_helpers/project/merge_request/diffs_metadata_helper.rb b/spec/contracts/provider/pact_helpers/project/merge_request/show/diffs_metadata_helper.rb
index 5f0c58d18d4..61567214b7a 100644
--- a/spec/contracts/provider/pact_helpers/project/merge_request/diffs_metadata_helper.rb
+++ b/spec/contracts/provider/pact_helpers/project/merge_request/show/diffs_metadata_helper.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
-require_relative '../../../spec_helper'
-require_relative '../../../states/project/merge_request/diffs_metadata_state'
+require_relative '../../../../spec_helper'
+require_relative '../../../../states/project/merge_request/show_state'
module Provider
module DiffsMetadataHelper
diff --git a/spec/contracts/provider/pact_helpers/project/merge_request/discussions_helper.rb b/spec/contracts/provider/pact_helpers/project/merge_request/show/discussions_helper.rb
index 0f4244ba40a..fa76ce8889a 100644
--- a/spec/contracts/provider/pact_helpers/project/merge_request/discussions_helper.rb
+++ b/spec/contracts/provider/pact_helpers/project/merge_request/show/discussions_helper.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
-require_relative '../../../spec_helper'
-require_relative '../../../states/project/merge_request/discussions_state'
+require_relative '../../../../spec_helper'
+require_relative '../../../../states/project/merge_request/show_state'
module Provider
module DiscussionsHelper
diff --git a/spec/contracts/provider/pact_helpers/project/pipeline/index/create_a_new_pipeline_helper.rb b/spec/contracts/provider/pact_helpers/project/pipeline/index/create_a_new_pipeline_helper.rb
new file mode 100644
index 00000000000..247a7c4ca8e
--- /dev/null
+++ b/spec/contracts/provider/pact_helpers/project/pipeline/index/create_a_new_pipeline_helper.rb
@@ -0,0 +1,16 @@
+# frozen_string_literal: true
+
+require_relative '../../../../spec_helper'
+require_relative '../../../../states/project/pipeline/new_state'
+
+module Provider
+ module CreateNewPipelineHelper
+ Pact.service_provider "POST Create a new pipeline" do
+ app { Environments::Test.app }
+
+ honours_pact_with 'Pipelines#new' do
+ pact_uri '../contracts/project/pipeline/new/pipelines#new-post_create_a_new_pipeline.json'
+ end
+ end
+ end
+end
diff --git a/spec/contracts/provider/pact_helpers/project/pipeline/get_list_project_pipelines_helper.rb b/spec/contracts/provider/pact_helpers/project/pipeline/index/get_list_project_pipelines_helper.rb
index 5307468b7c6..80cbbe3b4dd 100644
--- a/spec/contracts/provider/pact_helpers/project/pipeline/get_list_project_pipelines_helper.rb
+++ b/spec/contracts/provider/pact_helpers/project/pipeline/index/get_list_project_pipelines_helper.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
-require_relative '../../../spec_helper'
-require_relative '../../../states/project/pipeline/pipelines_state'
+require_relative '../../../../spec_helper'
+require_relative '../../../../states/project/pipeline/index_state'
module Provider
module GetListProjectPipelinesHelper
diff --git a/spec/contracts/provider/pact_helpers/project/pipeline/show/delete_pipeline_helper.rb b/spec/contracts/provider/pact_helpers/project/pipeline/show/delete_pipeline_helper.rb
new file mode 100644
index 00000000000..2d29fabfeca
--- /dev/null
+++ b/spec/contracts/provider/pact_helpers/project/pipeline/show/delete_pipeline_helper.rb
@@ -0,0 +1,16 @@
+# frozen_string_literal: true
+
+require_relative '../../../../spec_helper'
+require_relative '../../../../states/project/pipeline/show_state'
+
+module Provider
+ module DeletePipelineHelper
+ Pact.service_provider "DELETE pipeline" do
+ app { Environments::Test.app }
+
+ honours_pact_with 'Pipelines#show' do
+ pact_uri '../contracts/project/pipeline/show/pipelines#show-delete_pipeline.json'
+ end
+ end
+ end
+end
diff --git a/spec/contracts/provider/pact_helpers/project/pipeline/get_pipeline_header_data_helper.rb b/spec/contracts/provider/pact_helpers/project/pipeline/show/get_pipeline_header_data_helper.rb
index abb2781f987..bc8c04cc455 100644
--- a/spec/contracts/provider/pact_helpers/project/pipeline/get_pipeline_header_data_helper.rb
+++ b/spec/contracts/provider/pact_helpers/project/pipeline/show/get_pipeline_header_data_helper.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
-require_relative '../../../spec_helper'
-require_relative '../../../states/project/pipeline/pipeline_state'
+require_relative '../../../../spec_helper'
+require_relative '../../../../states/project/pipeline/show_state'
module Provider
module GetPipelinesHeaderDataHelper
diff --git a/spec/contracts/provider/pact_helpers/project/pipeline_schedule/update_pipeline_schedule_helper.rb b/spec/contracts/provider/pact_helpers/project/pipeline_schedule/update_pipeline_schedule_helper.rb
new file mode 100644
index 00000000000..a83aa9524dc
--- /dev/null
+++ b/spec/contracts/provider/pact_helpers/project/pipeline_schedule/update_pipeline_schedule_helper.rb
@@ -0,0 +1,16 @@
+# frozen_string_literal: true
+
+require_relative '../../../spec_helper'
+require_relative '../../../states/project/pipeline_schedule/edit_state'
+
+module Provider
+ module UpdatePipelineScheduleHelper
+ Pact.service_provider "PUT Edit a pipeline schedule" do
+ app { Environments::Test.app }
+
+ honours_pact_with 'PipelineSchedules#edit' do
+ pact_uri '../contracts/project/pipeline_schedule/edit/pipelineschedules#edit-put_edit_a_pipeline_schedule.json'
+ end
+ end
+ end
+end
diff --git a/spec/contracts/provider/states/project/merge_request/diffs_batch_state.rb b/spec/contracts/provider/states/project/merge_request/diffs_batch_state.rb
deleted file mode 100644
index ac20c17c187..00000000000
--- a/spec/contracts/provider/states/project/merge_request/diffs_batch_state.rb
+++ /dev/null
@@ -1,18 +0,0 @@
-# frozen_string_literal: true
-
-Pact.provider_states_for "MergeRequest#show" do
- provider_state "a merge request with diffs exists" do
- set_up do
- user = User.find_by(name: Provider::UsersHelper::CONTRACT_USER_NAME)
- namespace = create(:namespace, name: 'gitlab-org')
- project = create(:project, :custom_repo, name: 'gitlab-qa', namespace: namespace, files: {})
-
- project.add_maintainer(user)
-
- merge_request = create(:merge_request_with_multiple_diffs, source_project: project)
- merge_request_diff = create(:merge_request_diff, merge_request: merge_request)
-
- create(:merge_request_diff_file, :new_file, merge_request_diff: merge_request_diff)
- end
- end
-end
diff --git a/spec/contracts/provider/states/project/merge_request/diffs_metadata_state.rb b/spec/contracts/provider/states/project/merge_request/diffs_metadata_state.rb
deleted file mode 100644
index 8754232690c..00000000000
--- a/spec/contracts/provider/states/project/merge_request/diffs_metadata_state.rb
+++ /dev/null
@@ -1,18 +0,0 @@
-# frozen_string_literal: true
-
-Pact.provider_states_for "MergeRequest#show" do
- provider_state "a merge request exists" do
- set_up do
- user = User.find_by(name: Provider::UsersHelper::CONTRACT_USER_NAME)
- namespace = create(:namespace, name: 'gitlab-org')
- project = create(:project, :custom_repo, name: 'gitlab-qa', namespace: namespace, files: {})
-
- project.add_maintainer(user)
-
- merge_request = create(:merge_request, source_project: project)
- merge_request_diff = create(:merge_request_diff, merge_request: merge_request)
-
- create(:merge_request_diff_file, :new_file, merge_request_diff: merge_request_diff)
- end
- end
-end
diff --git a/spec/contracts/provider/states/project/merge_request/discussions_state.rb b/spec/contracts/provider/states/project/merge_request/discussions_state.rb
deleted file mode 100644
index 2d64f85eedf..00000000000
--- a/spec/contracts/provider/states/project/merge_request/discussions_state.rb
+++ /dev/null
@@ -1,17 +0,0 @@
-# frozen_string_literal: true
-
-Pact.provider_states_for "MergeRequest#show" do
- provider_state "a merge request with discussions exists" do
- set_up do
- user = User.find_by(name: Provider::UsersHelper::CONTRACT_USER_NAME)
- namespace = create(:namespace, name: 'gitlab-org')
- project = create(:project, name: 'gitlab-qa', namespace: namespace)
-
- project.add_maintainer(user)
-
- merge_request = create(:merge_request_with_diffs, source_project: project, author: user)
-
- create(:discussion_note_on_merge_request, noteable: merge_request, project: project, author: user)
- end
- end
-end
diff --git a/spec/contracts/provider/states/project/merge_request/show_state.rb b/spec/contracts/provider/states/project/merge_request/show_state.rb
new file mode 100644
index 00000000000..46f322f723a
--- /dev/null
+++ b/spec/contracts/provider/states/project/merge_request/show_state.rb
@@ -0,0 +1,47 @@
+# frozen_string_literal: true
+
+Pact.provider_states_for "MergeRequest#show" do
+ provider_state "a merge request with diffs exists" do
+ set_up do
+ user = User.find_by(name: Provider::UsersHelper::CONTRACT_USER_NAME)
+ namespace = create(:namespace, name: 'gitlab-org')
+ project = create(:project, :custom_repo, name: 'gitlab-qa', namespace: namespace, files: {})
+
+ project.add_maintainer(user)
+
+ merge_request = create(:merge_request_with_multiple_diffs, source_project: project)
+ merge_request_diff = create(:merge_request_diff, merge_request: merge_request)
+
+ create(:merge_request_diff_file, :new_file, merge_request_diff: merge_request_diff)
+ end
+ end
+
+ provider_state "a merge request exists" do
+ set_up do
+ user = User.find_by(name: Provider::UsersHelper::CONTRACT_USER_NAME)
+ namespace = create(:namespace, name: 'gitlab-org')
+ project = create(:project, :custom_repo, name: 'gitlab-qa', namespace: namespace, files: {})
+
+ project.add_maintainer(user)
+
+ merge_request = create(:merge_request, source_project: project)
+ merge_request_diff = create(:merge_request_diff, merge_request: merge_request)
+
+ create(:merge_request_diff_file, :new_file, merge_request_diff: merge_request_diff)
+ end
+ end
+
+ provider_state "a merge request with discussions exists" do
+ set_up do
+ user = User.find_by(name: Provider::UsersHelper::CONTRACT_USER_NAME)
+ namespace = create(:namespace, name: 'gitlab-org')
+ project = create(:project, name: 'gitlab-qa', namespace: namespace)
+
+ project.add_maintainer(user)
+
+ merge_request = create(:merge_request_with_diffs, source_project: project, author: user)
+
+ create(:discussion_note_on_merge_request, noteable: merge_request, project: project, author: user)
+ end
+ end
+end
diff --git a/spec/contracts/provider/states/project/pipeline/pipelines_state.rb b/spec/contracts/provider/states/project/pipeline/index_state.rb
index 639c25e9894..639c25e9894 100644
--- a/spec/contracts/provider/states/project/pipeline/pipelines_state.rb
+++ b/spec/contracts/provider/states/project/pipeline/index_state.rb
diff --git a/spec/contracts/provider/states/project/pipeline/new_state.rb b/spec/contracts/provider/states/project/pipeline/new_state.rb
new file mode 100644
index 00000000000..95914180bec
--- /dev/null
+++ b/spec/contracts/provider/states/project/pipeline/new_state.rb
@@ -0,0 +1,24 @@
+# frozen_string_literal: true
+
+Pact.provider_states_for "Pipelines#new" do
+ provider_state "a project with a valid .gitlab-ci.yml configuration exists" do
+ set_up do
+ user = User.find_by(name: Provider::UsersHelper::CONTRACT_USER_NAME)
+ namespace = create(:namespace, name: 'gitlab-org')
+ project = create(
+ :project,
+ :custom_repo,
+ name: 'gitlab-qa',
+ namespace: namespace,
+ creator: user,
+ files: {
+ '.gitlab-ci.yml' => <<~YAML
+ test-success:
+ script: echo 'OK'
+ YAML
+ })
+
+ project.add_maintainer(user)
+ end
+ end
+end
diff --git a/spec/contracts/provider/states/project/pipeline/pipeline_state.rb b/spec/contracts/provider/states/project/pipeline/show_state.rb
index d1a4cd34bdd..3365647cd13 100644
--- a/spec/contracts/provider/states/project/pipeline/pipeline_state.rb
+++ b/spec/contracts/provider/states/project/pipeline/show_state.rb
@@ -15,6 +15,7 @@ Pact.provider_states_for "Pipelines#show" do
:ci_pipeline,
:with_job,
:success,
+ id: 316112,
iid: 1,
project: project,
user: user,
diff --git a/spec/contracts/provider/states/project/pipeline_schedule/edit_state.rb b/spec/contracts/provider/states/project/pipeline_schedule/edit_state.rb
new file mode 100644
index 00000000000..4ee714f15f3
--- /dev/null
+++ b/spec/contracts/provider/states/project/pipeline_schedule/edit_state.rb
@@ -0,0 +1,15 @@
+# frozen_string_literal: true
+
+Pact.provider_states_for "PipelineSchedules#edit" do
+ provider_state "a project with a pipeline schedule exists" do
+ set_up do
+ user = User.find_by(name: Provider::UsersHelper::CONTRACT_USER_NAME)
+ namespace = create(:namespace, name: 'gitlab-org')
+ project = create(:project, :repository, name: 'gitlab-qa', namespace: namespace, creator: user)
+
+ project.add_maintainer(user)
+
+ create(:ci_pipeline_schedule, id: 25, project: project, owner: user)
+ end
+ end
+end
diff --git a/spec/controllers/admin/dev_ops_report_controller_spec.rb b/spec/controllers/admin/dev_ops_report_controller_spec.rb
index 49e6c0f69bd..5d7a7e089aa 100644
--- a/spec/controllers/admin/dev_ops_report_controller_spec.rb
+++ b/spec/controllers/admin/dev_ops_report_controller_spec.rb
@@ -28,6 +28,17 @@ RSpec.describe Admin::DevOpsReportController do
let(:request_params) { { tab: 'devops-score' } }
end
+
+ it_behaves_like 'Snowplow event tracking' do
+ subject { get :show, format: :html }
+
+ let(:feature_flag_name) { :route_hll_to_snowplow_phase2 }
+ let(:category) { described_class.name }
+ let(:action) { 'perform_analytics_usage_action' }
+ let(:label) { 'redis_hll_counters.analytics.analytics_total_unique_counts_monthly' }
+ let(:property) { 'i_analytics_dev_ops_score' }
+ let(:namespace) { nil }
+ end
end
end
diff --git a/spec/controllers/admin/identities_controller_spec.rb b/spec/controllers/admin/identities_controller_spec.rb
index 6ac5ce13884..e32191e04e7 100644
--- a/spec/controllers/admin/identities_controller_spec.rb
+++ b/spec/controllers/admin/identities_controller_spec.rb
@@ -9,6 +9,30 @@ RSpec.describe Admin::IdentitiesController do
sign_in(admin)
end
+ describe 'GET #index' do
+ context 'when the user has no identities' do
+ it 'shows no identities' do
+ get :index, params: { user_id: admin.username }
+
+ expect(assigns(:user)).to eq(admin)
+ expect(assigns(:identities)).to be_blank
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+ end
+
+ context 'when the user has identities' do
+ let(:ldap_user) { create(:omniauth_user, provider: 'ldapmain', extern_uid: 'ldap-uid') }
+
+ it 'shows identities' do
+ get :index, params: { user_id: ldap_user.username }
+
+ expect(assigns(:user)).to eq(ldap_user)
+ expect(assigns(:identities)).to eq(ldap_user.identities)
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+ end
+ end
+
describe 'UPDATE identity' do
let(:user) { create(:omniauth_user, provider: 'ldapmain', extern_uid: 'uid=myuser,ou=people,dc=example,dc=com') }
diff --git a/spec/controllers/admin/topics_controller_spec.rb b/spec/controllers/admin/topics_controller_spec.rb
index ee36d5f1def..87093e0263b 100644
--- a/spec/controllers/admin/topics_controller_spec.rb
+++ b/spec/controllers/admin/topics_controller_spec.rb
@@ -173,4 +173,44 @@ RSpec.describe Admin::TopicsController do
end
end
end
+
+ describe 'POST #merge' do
+ let_it_be(:source_topic) { create(:topic, name: 'source_topic') }
+ let_it_be(:project) { create(:project, topic_list: source_topic.name) }
+
+ it 'merges source topic into target topic' do
+ post :merge, params: { source_topic_id: source_topic.id, target_topic_id: topic.id }
+
+ expect(response).to redirect_to(admin_topics_path)
+ expect(topic.projects).to contain_exactly(project)
+ expect { source_topic.reload }.to raise_error(ActiveRecord::RecordNotFound)
+ end
+
+ it 'renders a 404 error for non-existing id' do
+ post :merge, params: { source_topic_id: non_existing_record_id, target_topic_id: topic.id }
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ expect { topic.reload }.not_to raise_error
+ end
+
+ it 'renders a 400 error for identical topic ids' do
+ post :merge, params: { source_topic_id: topic, target_topic_id: topic.id }
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect { topic.reload }.not_to raise_error
+ end
+
+ context 'as a normal user' do
+ before do
+ sign_in(user)
+ end
+
+ it 'renders a 404 error' do
+ post :merge, params: { source_topic_id: source_topic.id, target_topic_id: topic.id }
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ expect { source_topic.reload }.not_to raise_error
+ end
+ end
+ end
end
diff --git a/spec/controllers/admin/usage_trends_controller_spec.rb b/spec/controllers/admin/usage_trends_controller_spec.rb
index 35fb005aacb..356f603bf57 100644
--- a/spec/controllers/admin/usage_trends_controller_spec.rb
+++ b/spec/controllers/admin/usage_trends_controller_spec.rb
@@ -13,5 +13,18 @@ RSpec.describe Admin::UsageTrendsController do
it_behaves_like 'tracking unique visits', :index do
let(:target_id) { 'i_analytics_instance_statistics' }
end
+
+ it_behaves_like 'Snowplow event tracking' do
+ subject { get :index }
+
+ let(:feature_flag_name) { :route_hll_to_snowplow_phase2 }
+ let(:category) { described_class.name }
+ let(:action) { 'perform_analytics_usage_action' }
+ let(:label) { 'redis_hll_counters.analytics.analytics_total_unique_counts_monthly' }
+ let(:property) { 'i_analytics_instance_statistics' }
+ let(:namespace) { nil }
+ let(:project) { nil }
+ let(:user) { admin }
+ end
end
end
diff --git a/spec/controllers/admin/users_controller_spec.rb b/spec/controllers/admin/users_controller_spec.rb
index c46a12680a2..515ad9daf36 100644
--- a/spec/controllers/admin/users_controller_spec.rb
+++ b/spec/controllers/admin/users_controller_spec.rb
@@ -140,7 +140,7 @@ RSpec.describe Admin::UsersController do
it 'displays the rejection message' do
subject
- expect(response).to redirect_to(admin_users_path)
+ expect(response).to redirect_to(admin_user_path(user))
expect(flash[:notice]).to eq("You've rejected #{user.name}")
end
@@ -612,8 +612,8 @@ RSpec.describe Admin::UsersController do
end
context 'when the new password does not match the password confirmation' do
- let(:password) { 'some_password' }
- let(:password_confirmation) { 'not_same_as_password' }
+ let(:password) { User.random_password }
+ let(:password_confirmation) { User.random_password }
it 'shows the edit page again' do
update_password(user, password, password_confirmation)
diff --git a/spec/controllers/groups/uploads_controller_spec.rb b/spec/controllers/groups/uploads_controller_spec.rb
index 8fcc3a7fccf..645360289d1 100644
--- a/spec/controllers/groups/uploads_controller_spec.rb
+++ b/spec/controllers/groups/uploads_controller_spec.rb
@@ -67,30 +67,10 @@ RSpec.describe Groups::UploadsController do
end
context "when not signed in" do
- context "enforce_auth_checks_on_uploads feature flag" do
- context "with flag enabled" do
- before do
- stub_feature_flags(enforce_auth_checks_on_uploads: true)
- end
+ it "responds with appropriate status" do
+ show_upload
- it "responds with appropriate status" do
- show_upload
-
- expect(response).to have_gitlab_http_status(:ok)
- end
- end
-
- context "with flag disabled" do
- before do
- stub_feature_flags(enforce_auth_checks_on_uploads: false)
- end
-
- it "responds with status 200" do
- show_upload
-
- expect(response).to have_gitlab_http_status(:ok)
- end
- end
+ expect(response).to have_gitlab_http_status(:ok)
end
end
@@ -100,30 +80,10 @@ RSpec.describe Groups::UploadsController do
end
context "when the user doesn't have access to the model" do
- context "enforce_auth_checks_on_uploads feature flag" do
- context "with flag enabled" do
- before do
- stub_feature_flags(enforce_auth_checks_on_uploads: true)
- end
-
- it "responds with status 200" do
- show_upload
-
- expect(response).to have_gitlab_http_status(:ok)
- end
- end
- end
-
- context "with flag disabled" do
- before do
- stub_feature_flags(enforce_auth_checks_on_uploads: false)
- end
-
- it "responds with status 200" do
- show_upload
+ it "responds with status 200" do
+ show_upload
- expect(response).to have_gitlab_http_status(:ok)
- end
+ expect(response).to have_gitlab_http_status(:ok)
end
end
end
@@ -135,30 +95,10 @@ RSpec.describe Groups::UploadsController do
end
context "when not signed in" do
- context "enforce_auth_checks_on_uploads feature flag" do
- context "with flag enabled" do
- before do
- stub_feature_flags(enforce_auth_checks_on_uploads: true)
- end
+ it "responds with appropriate status" do
+ show_upload
- it "responds with appropriate status" do
- show_upload
-
- expect(response).to have_gitlab_http_status(:ok)
- end
- end
-
- context "with flag disabled" do
- before do
- stub_feature_flags(enforce_auth_checks_on_uploads: false)
- end
-
- it "responds with status 200" do
- show_upload
-
- expect(response).to have_gitlab_http_status(:ok)
- end
- end
+ expect(response).to have_gitlab_http_status(:ok)
end
end
@@ -168,30 +108,10 @@ RSpec.describe Groups::UploadsController do
end
context "when the user doesn't have access to the model" do
- context "enforce_auth_checks_on_uploads feature flag" do
- context "with flag enabled" do
- before do
- stub_feature_flags(enforce_auth_checks_on_uploads: true)
- end
-
- it "responds with status 200" do
- show_upload
-
- expect(response).to have_gitlab_http_status(:ok)
- end
- end
- end
-
- context "with flag disabled" do
- before do
- stub_feature_flags(enforce_auth_checks_on_uploads: false)
- end
-
- it "responds with status 200" do
- show_upload
+ it "responds with status 200" do
+ show_upload
- expect(response).to have_gitlab_http_status(:ok)
- end
+ expect(response).to have_gitlab_http_status(:ok)
end
end
end
diff --git a/spec/controllers/import/bulk_imports_controller_spec.rb b/spec/controllers/import/bulk_imports_controller_spec.rb
index 7177c8c10a6..3be12717664 100644
--- a/spec/controllers/import/bulk_imports_controller_spec.rb
+++ b/spec/controllers/import/bulk_imports_controller_spec.rb
@@ -245,11 +245,11 @@ RSpec.describe Import::BulkImportsController do
let(:bulk_import_params) do
[{ "source_type" => "group_entity",
"source_full_path" => "full_path",
- "destination_name" => "destination_name",
+ "destination_slug" => "destination_name",
"destination_namespace" => "root" },
{ "source_type" => "group_entity2",
"source_full_path" => "full_path2",
- "destination_name" => "destination_name2",
+ "destination_slug" => "destination_name2",
"destination_namespace" => "root" }]
end
@@ -258,7 +258,7 @@ RSpec.describe Import::BulkImportsController do
session[:bulk_import_gitlab_url] = instance_url
end
- it 'executes BulkImpors::CreateService' do
+ it 'executes BulkImports::CreateService' do
error_response = ServiceResponse.error(message: 'Record invalid', http_status: :unprocessable_entity)
expect_next_instance_of(
@@ -276,6 +276,38 @@ RSpec.describe Import::BulkImportsController do
expect(json_response).to eq([{ "success" => true, "id" => bulk_import.id, "message" => nil },
{ "success" => false, "id" => nil, "message" => "Record invalid" }])
end
+
+ context 'when entity destination_name is specified' do
+ let(:bulk_import_params) do
+ [
+ {
+ "source_type" => "group_entity",
+ "source_full_path" => "full_path",
+ "destination_name" => "destination_name",
+ "destination_namespace" => "root"
+ }
+ ]
+ end
+
+ it 'replaces destination_name with destination_slug and executes BulkImports::CreateService' do
+ entity = {
+ "source_type" => "group_entity",
+ "source_full_path" => "full_path",
+ "destination_slug" => "destination_name",
+ "destination_namespace" => "root"
+ }
+
+ expect_next_instance_of(
+ ::BulkImports::CreateService, user, entity, { url: instance_url, access_token: pat }) do |service|
+ allow(service).to receive(:execute).and_return(ServiceResponse.success(payload: bulk_import))
+ end
+
+ post :create, params: { bulk_import: bulk_import_params }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response).to match_array([{ "success" => true, "id" => bulk_import.id, "message" => nil }])
+ end
+ end
end
end
diff --git a/spec/controllers/profiles/personal_access_tokens_controller_spec.rb b/spec/controllers/profiles/personal_access_tokens_controller_spec.rb
index aafea0050d3..0e531dbaf4b 100644
--- a/spec/controllers/profiles/personal_access_tokens_controller_spec.rb
+++ b/spec/controllers/profiles/personal_access_tokens_controller_spec.rb
@@ -87,6 +87,38 @@ RSpec.describe Profiles::PersonalAccessTokensController do
end
end
+ context "tokens returned are ordered" do
+ let(:expires_1_day_from_now) { 1.day.from_now.to_date }
+ let(:expires_2_days_from_now) { 2.days.from_now.to_date }
+
+ before do
+ create(:personal_access_token, user: user, name: "Token1", expires_at: expires_1_day_from_now)
+ create(:personal_access_token, user: user, name: "Token2", expires_at: expires_2_day_from_now)
+ end
+
+ it "orders token list ascending on expires_at" do
+ get :index
+
+ first_token = assigns(:active_personal_access_tokens).first.as_json
+ expect(first_token[:name]).to eq("Token1")
+ expect(first_token[:expires_at]).to eq(expires_1_day_from_now.strftime("%Y-%m-%d"))
+ end
+
+ it "orders tokens on id in case token has same expires_at" do
+ create(:personal_access_token, user: user, name: "Token3", expires_at: expires_1_day_from_now)
+
+ get :index
+
+ first_token = assigns(:active_personal_access_tokens).first.as_json
+ expect(first_token[:name]).to eq("Token3")
+ expect(first_token[:expires_at]).to eq(expires_1_day_from_now.strftime("%Y-%m-%d"))
+
+ second_token = assigns(:active_personal_access_tokens).second.as_json
+ expect(second_token[:name]).to eq("Token1")
+ expect(second_token[:expires_at]).to eq(expires_1_day_from_now.strftime("%Y-%m-%d"))
+ end
+ end
+
context "access_token_pagination feature flag is disabled" do
before do
stub_feature_flags(access_token_pagination: false)
diff --git a/spec/controllers/profiles_controller_spec.rb b/spec/controllers/profiles_controller_spec.rb
index 6e7cc058fbc..89185a8f856 100644
--- a/spec/controllers/profiles_controller_spec.rb
+++ b/spec/controllers/profiles_controller_spec.rb
@@ -3,16 +3,16 @@
require('spec_helper')
RSpec.describe ProfilesController, :request_store do
- let(:password) { 'longsecret987!' }
+ let(:password) { User.random_password }
let(:user) { create(:user, password: password) }
describe 'POST update' do
it 'does not update password' do
sign_in(user)
-
+ new_password = User.random_password
expect do
post :update,
- params: { user: { password: 'hello12345', password_confirmation: 'hello12345' } }
+ params: { user: { password: new_password, password_confirmation: new_password } }
end.not_to change { user.reload.encrypted_password }
expect(response).to have_gitlab_http_status(:found)
diff --git a/spec/controllers/projects/analytics/cycle_analytics/stages_controller_spec.rb b/spec/controllers/projects/analytics/cycle_analytics/stages_controller_spec.rb
index 3f0318c3973..8903592ba15 100644
--- a/spec/controllers/projects/analytics/cycle_analytics/stages_controller_spec.rb
+++ b/spec/controllers/projects/analytics/cycle_analytics/stages_controller_spec.rb
@@ -54,6 +54,32 @@ RSpec.describe Projects::Analytics::CycleAnalytics::StagesController do
end
end
+ shared_examples 'project-level value stream analytics with guest user' do
+ let_it_be(:guest) { create(:user) }
+
+ before do
+ project.add_guest(guest)
+ sign_out(user)
+ sign_in(guest)
+ end
+
+ %w[code review].each do |id|
+ it "disallows stage #{id}" do
+ get action, params: params.merge(id: id)
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+ end
+
+ %w[issue plan test staging].each do |id|
+ it "allows stage #{id}" do
+ get action, params: params.merge(id: id)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+ end
+ end
+
describe 'GET index' do
let(:action) { :index }
@@ -78,6 +104,20 @@ RSpec.describe Projects::Analytics::CycleAnalytics::StagesController do
end
it_behaves_like 'project-level value stream analytics request error examples'
+
+ it 'only returns authorized stages' do
+ guest = create(:user)
+ sign_out(user)
+ sign_in(guest)
+ project.add_guest(guest)
+
+ get action, params: params
+
+ expect(response).to have_gitlab_http_status(:ok)
+
+ expect(json_response['stages'].map { |stage| stage['title'] })
+ .to contain_exactly('Issue', 'Plan', 'Test', 'Staging')
+ end
end
describe 'GET median' do
@@ -102,6 +142,8 @@ RSpec.describe Projects::Analytics::CycleAnalytics::StagesController do
end
it_behaves_like 'project-level value stream analytics request error examples'
+
+ it_behaves_like 'project-level value stream analytics with guest user'
end
describe 'GET average' do
@@ -126,6 +168,8 @@ RSpec.describe Projects::Analytics::CycleAnalytics::StagesController do
end
it_behaves_like 'project-level value stream analytics request error examples'
+
+ it_behaves_like 'project-level value stream analytics with guest user'
end
describe 'GET count' do
@@ -150,6 +194,8 @@ RSpec.describe Projects::Analytics::CycleAnalytics::StagesController do
end
it_behaves_like 'project-level value stream analytics request error examples'
+
+ it_behaves_like 'project-level value stream analytics with guest user'
end
describe 'GET records' do
@@ -174,5 +220,7 @@ RSpec.describe Projects::Analytics::CycleAnalytics::StagesController do
end
it_behaves_like 'project-level value stream analytics request error examples'
+
+ it_behaves_like 'project-level value stream analytics with guest user'
end
end
diff --git a/spec/controllers/projects/blob_controller_spec.rb b/spec/controllers/projects/blob_controller_spec.rb
index cc807098498..887a5ba598f 100644
--- a/spec/controllers/projects/blob_controller_spec.rb
+++ b/spec/controllers/projects/blob_controller_spec.rb
@@ -352,7 +352,6 @@ RSpec.describe Projects::BlobController do
project_new_merge_request_path(
forked_project,
merge_request: {
- source_project_id: forked_project.id,
target_project_id: project.id,
source_branch: "fork-test-1",
target_branch: "master"
diff --git a/spec/controllers/projects/ci/secure_files_controller_spec.rb b/spec/controllers/projects/ci/secure_files_controller_spec.rb
deleted file mode 100644
index 200997e31b9..00000000000
--- a/spec/controllers/projects/ci/secure_files_controller_spec.rb
+++ /dev/null
@@ -1,67 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Projects::Ci::SecureFilesController do
- let_it_be(:project) { create(:project) }
- let_it_be(:user) { create(:user) }
-
- subject(:show_request) { get :show, params: { namespace_id: project.namespace, project_id: project } }
-
- describe 'GET #show' do
- context 'when the :ci_secure_files feature flag is enabled' do
- context 'with enough privileges' do
- before do
- stub_feature_flags(ci_secure_files: true)
- sign_in(user)
- project.add_developer(user)
- show_request
- end
-
- it { expect(response).to have_gitlab_http_status(:ok) }
-
- it 'renders show page' do
- expect(response).to render_template :show
- end
- end
- end
-
- context 'when the :ci_secure_files feature flag is disabled' do
- context 'with enough privileges' do
- before do
- stub_feature_flags(ci_secure_files: false)
- sign_in(user)
- project.add_developer(user)
- show_request
- end
-
- it 'responds with 404' do
- expect(response).to have_gitlab_http_status(:not_found)
- end
- end
- end
-
- context 'without enough privileges' do
- before do
- sign_in(user)
- project.add_reporter(user)
- show_request
- end
-
- it 'responds with 404' do
- expect(response).to have_gitlab_http_status(:not_found)
- end
- end
-
- context 'an unauthenticated user' do
- before do
- show_request
- end
-
- it 'redirects to sign in' do
- expect(response).to have_gitlab_http_status(:found)
- expect(response).to redirect_to('/users/sign_in')
- end
- end
- end
-end
diff --git a/spec/controllers/projects/clusters_controller_spec.rb b/spec/controllers/projects/clusters_controller_spec.rb
index d45ea268e64..12202518e1e 100644
--- a/spec/controllers/projects/clusters_controller_spec.rb
+++ b/spec/controllers/projects/clusters_controller_spec.rb
@@ -259,9 +259,11 @@ RSpec.describe Projects::ClustersController do
it 'is allowed for admin when admin mode enabled', :enable_admin_mode do
expect { go }.to be_allowed_for(:admin)
end
+
it 'is disabled for admin when admin mode disabled' do
expect { go }.to be_denied_for(:admin)
end
+
it { expect { go }.to be_allowed_for(:owner).of(project) }
it { expect { go }.to be_allowed_for(:maintainer).of(project) }
it { expect { go }.to be_denied_for(:developer).of(project) }
@@ -300,9 +302,11 @@ RSpec.describe Projects::ClustersController do
it 'is allowed for admin when admin mode enabled', :enable_admin_mode do
expect { go }.to be_allowed_for(:admin)
end
+
it 'is disabled for admin when admin mode disabled' do
expect { go }.to be_denied_for(:admin)
end
+
it { expect { go }.to be_allowed_for(:owner).of(project) }
it { expect { go }.to be_allowed_for(:maintainer).of(project) }
it { expect { go }.to be_denied_for(:developer).of(project) }
@@ -349,9 +353,11 @@ RSpec.describe Projects::ClustersController do
it 'is allowed for admin when admin mode enabled', :enable_admin_mode do
expect { go }.to be_allowed_for(:admin)
end
+
it 'is disabled for admin when admin mode disabled' do
expect { go }.to be_denied_for(:admin)
end
+
it { expect { go }.to be_allowed_for(:owner).of(project) }
it { expect { go }.to be_allowed_for(:maintainer).of(project) }
it { expect { go }.to be_denied_for(:developer).of(project) }
@@ -401,9 +407,11 @@ RSpec.describe Projects::ClustersController do
it 'is allowed for admin when admin mode enabled', :enable_admin_mode do
expect { go }.to be_allowed_for(:admin)
end
+
it 'is disabled for admin when admin mode disabled' do
expect { go }.to be_denied_for(:admin)
end
+
it { expect { go }.to be_allowed_for(:owner).of(project) }
it { expect { go }.to be_allowed_for(:maintainer).of(project) }
it { expect { go }.to be_allowed_for(:developer).of(project) }
@@ -515,9 +523,11 @@ RSpec.describe Projects::ClustersController do
it 'is allowed for admin when admin mode enabled', :enable_admin_mode do
expect { go }.to be_allowed_for(:admin)
end
+
it 'is disabled for admin when admin mode disabled' do
expect { go }.to be_denied_for(:admin)
end
+
it { expect { go }.to be_allowed_for(:owner).of(project) }
it { expect { go }.to be_allowed_for(:maintainer).of(project) }
it { expect { go }.to be_denied_for(:developer).of(project) }
@@ -593,9 +603,11 @@ RSpec.describe Projects::ClustersController do
it 'is allowed for admin when admin mode enabled', :enable_admin_mode do
expect { go }.to be_allowed_for(:admin)
end
+
it 'is disabled for admin when admin mode disabled' do
expect { go }.to be_denied_for(:admin)
end
+
it { expect { go }.to be_allowed_for(:owner).of(project) }
it { expect { go }.to be_allowed_for(:maintainer).of(project) }
it { expect { go }.to be_denied_for(:developer).of(project) }
diff --git a/spec/controllers/projects/commit_controller_spec.rb b/spec/controllers/projects/commit_controller_spec.rb
index a72c98552a5..edb07bbdce6 100644
--- a/spec/controllers/projects/commit_controller_spec.rb
+++ b/spec/controllers/projects/commit_controller_spec.rb
@@ -82,6 +82,22 @@ RSpec.describe Projects::CommitController do
expect(response).to be_successful
end
+ it 'only loads blobs in the current page' do
+ stub_feature_flags(async_commit_diff_files: false)
+ stub_const('Projects::CommitController::COMMIT_DIFFS_PER_PAGE', 1)
+
+ commit = project.commit('1a0b36b3cdad1d2ee32457c102a8c0b7056fa863')
+
+ expect_next_instance_of(Repository) do |repository|
+ # This commit contains 3 changed files but we expect only the blobs for the first one to be loaded
+ expect(repository).to receive(:blobs_at).with([[commit.id, '.gitignore']], anything).and_call_original
+ end
+
+ go(id: commit.id)
+
+ expect(response).to be_ok
+ end
+
shared_examples "export as" do |format|
it "does generally work" do
go(id: commit.id, format: format)
@@ -378,7 +394,6 @@ RSpec.describe Projects::CommitController do
project_new_merge_request_path(
source_project,
merge_request: {
- source_project_id: source_project.id,
target_project_id: project.id,
source_branch: branch,
target_branch: 'feature'
diff --git a/spec/controllers/projects/compare_controller_spec.rb b/spec/controllers/projects/compare_controller_spec.rb
index e6e0307d0ca..6ed6f7017e3 100644
--- a/spec/controllers/projects/compare_controller_spec.rb
+++ b/spec/controllers/projects/compare_controller_spec.rb
@@ -226,8 +226,8 @@ RSpec.describe Projects::CompareController do
context 'when page is valid' do
let(:from_project_id) { nil }
- let(:from_ref) { '08f22f25' }
- let(:to_ref) { '66eceea0' }
+ let(:from_ref) { '6f6d7e7ed97bb5f0054f2b1df789b39ca89b6ff9' }
+ let(:to_ref) { '5937ac0a7beb003549fc5fd26fc247adbce4a52e' }
let(:page) { 1 }
it 'shows the diff' do
@@ -237,6 +237,21 @@ RSpec.describe Projects::CompareController do
expect(assigns(:diffs).diff_files.first).to be_present
expect(assigns(:commits).length).to be >= 1
end
+
+ it 'only loads blobs in the current page' do
+ stub_const('Projects::CompareController::COMMIT_DIFFS_PER_PAGE', 1)
+
+ expect_next_instance_of(Repository) do |repository|
+ # This comparison contains 4 changed files but we expect only the blobs for the first one to be loaded
+ expect(repository).to receive(:blobs_at).with(
+ contain_exactly([from_ref, '.gitmodules'], [to_ref, '.gitmodules']), anything
+ ).and_call_original
+ end
+
+ show_request
+
+ expect(response).to be_successful
+ end
end
context 'when page is not valid' do
diff --git a/spec/controllers/projects/environments_controller_spec.rb b/spec/controllers/projects/environments_controller_spec.rb
index f4cad5790a3..1a6edab795d 100644
--- a/spec/controllers/projects/environments_controller_spec.rb
+++ b/spec/controllers/projects/environments_controller_spec.rb
@@ -233,7 +233,7 @@ RSpec.describe Projects::EnvironmentsController do
end
context "when environment params are invalid" do
- let(:params) { environment_params.merge(environment: { name: '/foo/', external_url: '/git.gitlab.com' }) }
+ let(:params) { environment_params.merge(environment: { external_url: 'javascript:alert("hello")' }) }
it 'returns bad request' do
subject
diff --git a/spec/controllers/projects/hooks_controller_spec.rb b/spec/controllers/projects/hooks_controller_spec.rb
index a275bc28631..ba7b712964c 100644
--- a/spec/controllers/projects/hooks_controller_spec.rb
+++ b/spec/controllers/projects/hooks_controller_spec.rb
@@ -98,7 +98,7 @@ RSpec.describe Projects::HooksController do
def it_renders_correctly
expect(response).to have_gitlab_http_status(:ok)
expect(response).to render_template(:edit)
- expect(response).to render_template('projects/hook_logs/_index')
+ expect(response).to render_template('shared/hook_logs/_index')
end
end
diff --git a/spec/controllers/projects/issues_controller_spec.rb b/spec/controllers/projects/issues_controller_spec.rb
index badac688229..c48be8efb1b 100644
--- a/spec/controllers/projects/issues_controller_spec.rb
+++ b/spec/controllers/projects/issues_controller_spec.rb
@@ -1607,22 +1607,32 @@ RSpec.describe Projects::IssuesController do
project.add_developer(user)
end
- it "returns 302 for project members with developer role" do
- import_csv
+ context 'when upload proceeds correctly' do
+ it "returns 302 for project members with developer role" do
+ import_csv
- expect(flash[:notice]).to eq(_("Your issues are being imported. Once finished, you'll get a confirmation email."))
- expect(response).to redirect_to(project_issues_path(project))
- end
+ expect(flash[:notice]).to eq(_("Your issues are being imported. Once finished, you'll get a confirmation email."))
+ expect(response).to redirect_to(project_issues_path(project))
+ end
+
+ it 'enqueues an import job' do
+ expect(ImportIssuesCsvWorker).to receive(:perform_async).with(user.id, project.id, Integer)
- it "shows error when upload fails" do
- expect_next_instance_of(UploadService) do |upload_service|
- expect(upload_service).to receive(:execute).and_return(nil)
+ import_csv
end
+ end
- import_csv
+ context 'when upload fails' do
+ it "shows error when upload fails" do
+ expect_next_instance_of(UploadService) do |upload_service|
+ expect(upload_service).to receive(:execute).and_return(nil)
+ end
- expect(flash[:alert]).to include(_('File upload error.'))
- expect(response).to redirect_to(project_issues_path(project))
+ import_csv
+
+ expect(flash[:alert]).to include(_('File upload error.'))
+ expect(response).to redirect_to(project_issues_path(project))
+ end
end
end
diff --git a/spec/controllers/projects/merge_requests_controller_spec.rb b/spec/controllers/projects/merge_requests_controller_spec.rb
index 8ccbc0d3fe2..ed5e32df8ea 100644
--- a/spec/controllers/projects/merge_requests_controller_spec.rb
+++ b/spec/controllers/projects/merge_requests_controller_spec.rb
@@ -1894,15 +1894,12 @@ RSpec.describe Projects::MergeRequestsController do
# First run to insert test data from lets, which does take up some 30 queries
get_ci_environments_status
- control_count = ActiveRecord::QueryRecorder.new(skip_cached: false) { get_ci_environments_status }.count
+ control_count = ActiveRecord::QueryRecorder.new { get_ci_environments_status }
environment2 = create(:environment, project: forked)
create(:deployment, :succeed, environment: environment2, sha: sha, ref: 'master', deployable: build)
- # TODO address the last 3 queries
- # See https://gitlab.com/gitlab-org/gitlab-foss/issues/63952 (3 queries)
- leeway = 3
- expect { get_ci_environments_status }.not_to exceed_all_query_limit(control_count + leeway)
+ expect { get_ci_environments_status }.not_to exceed_all_query_limit(control_count)
end
end
@@ -2039,25 +2036,50 @@ RSpec.describe Projects::MergeRequestsController do
end
describe 'POST #rebase' do
+ let(:other_params) { {} }
+ let(:params) { { namespace_id: project.namespace, project_id: project, id: merge_request }.merge(other_params) }
+
def post_rebase
- post :rebase, params: { namespace_id: project.namespace, project_id: project, id: merge_request }
+ post :rebase, params: params
end
before do
allow(RebaseWorker).to receive(:with_status).and_return(RebaseWorker)
end
- def expect_rebase_worker_for(user)
- expect(RebaseWorker).to receive(:perform_async).with(merge_request.id, user.id, false)
+ def expect_rebase_worker_for(user, skip_ci: false)
+ expect(RebaseWorker).to receive(:perform_async).with(merge_request.id, user.id, skip_ci)
end
context 'successfully' do
- it 'enqeues a RebaseWorker' do
- expect_rebase_worker_for(user)
+ shared_examples 'successful rebase scheduler' do
+ it 'enqueues a RebaseWorker' do
+ expect_rebase_worker_for(user, skip_ci: skip_ci)
- post_rebase
+ post_rebase
- expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+ end
+
+ context 'with skip_ci not specified' do
+ let(:skip_ci) { false }
+
+ it_behaves_like 'successful rebase scheduler'
+ end
+
+ context 'with skip_ci enabled' do
+ let(:skip_ci) { true }
+ let(:other_params) { { skip_ci: 'true' } }
+
+ it_behaves_like 'successful rebase scheduler'
+ end
+
+ context 'with skip_ci disabled' do
+ let(:skip_ci) { false }
+ let(:other_params) { { skip_ci: 'false' } }
+
+ it_behaves_like 'successful rebase scheduler'
end
end
diff --git a/spec/controllers/projects/notes_controller_spec.rb b/spec/controllers/projects/notes_controller_spec.rb
index 85e5de46afd..9050765afd6 100644
--- a/spec/controllers/projects/notes_controller_spec.rb
+++ b/spec/controllers/projects/notes_controller_spec.rb
@@ -345,34 +345,77 @@ RSpec.describe Projects::NotesController do
}
end
- context 'when `confidential` parameter is not provided' do
- it 'sets `confidential` to `false` in JSON response' do
+ context 'when no parameter is provided' do
+ it 'sets `confidential` and `internal` to `false` in JSON response' do
create!
expect(response).to have_gitlab_http_status(:ok)
expect(json_response['confidential']).to be false
+ expect(json_response['internal']).to be false
end
end
- context 'when `confidential` parameter is `false`' do
- let(:extra_note_params) { { confidential: false } }
+ context 'when the note is not confidential' do
+ context 'when using the `internal` parameter' do
+ let(:extra_note_params) { { internal: false } }
- it 'sets `confidential` to `false` in JSON response' do
- create!
+ it 'sets `confidential` and `internal` to `false` in JSON response' do
+ create!
- expect(response).to have_gitlab_http_status(:ok)
- expect(json_response['confidential']).to be false
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['confidential']).to be false
+ expect(json_response['internal']).to be false
+ end
+ end
+
+ context 'when using deprecated `confidential` parameter' do
+ let(:extra_note_params) { { confidential: false } }
+
+ it 'sets `confidential` and `internal` to `false` in JSON response' do
+ create!
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['confidential']).to be false
+ expect(json_response['internal']).to be false
+ end
end
end
- context 'when `confidential` parameter is `true`' do
- let(:extra_note_params) { { confidential: true } }
+ context 'when the note is confidential' do
+ context 'when using the `internal` parameter' do
+ let(:extra_note_params) { { internal: true } }
- it 'sets `confidential` to `true` in JSON response' do
- create!
+ it 'sets `confidential` and `internal` to `true` in JSON response' do
+ create!
- expect(response).to have_gitlab_http_status(:ok)
- expect(json_response['confidential']).to be true
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['confidential']).to be true
+ expect(json_response['internal']).to be true
+ end
+ end
+
+ context 'when using deprecated `confidential` parameter' do
+ let(:extra_note_params) { { confidential: true } }
+
+ it 'sets `confidential` and `internal` to `true` in JSON response' do
+ create!
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['confidential']).to be true
+ expect(json_response['internal']).to be true
+ end
+ end
+
+ context 'when `internal` parameter is `true` and `confidential` parameter is `false`' do
+ let(:extra_note_params) { { internal: true, confidential: false } }
+
+ it 'uses the `internal` param as the source of truth' do
+ create!
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['confidential']).to be true
+ expect(json_response['internal']).to be true
+ end
end
end
end
diff --git a/spec/controllers/projects/pages_controller_spec.rb b/spec/controllers/projects/pages_controller_spec.rb
index 1fa8838b548..136f98ac907 100644
--- a/spec/controllers/projects/pages_controller_spec.rb
+++ b/spec/controllers/projects/pages_controller_spec.rb
@@ -19,9 +19,9 @@ RSpec.describe Projects::PagesController do
project.add_maintainer(user)
end
- describe 'GET show' do
+ describe 'GET new' do
it 'returns 200 status' do
- get :show, params: request_params
+ get :new, params: request_params
expect(response).to have_gitlab_http_status(:ok)
end
@@ -31,13 +31,55 @@ RSpec.describe Projects::PagesController do
let(:project) { create(:project, namespace: group) }
it 'returns a 200 status code' do
- get :show, params: request_params
+ get :new, params: request_params
expect(response).to have_gitlab_http_status(:ok)
end
end
end
+ describe 'GET show' do
+ subject { get :show, params: request_params }
+
+ context 'when the project does not have onboarding complete' do
+ before do
+ project.pages_metadatum.update_attribute(:deployed, false)
+ project.pages_metadatum.update_attribute(:onboarding_complete, false)
+ end
+
+ it 'redirects to #new' do
+ expect(subject).to redirect_to(action: 'new')
+ end
+ end
+
+ context 'when the project does have onboarding complete' do
+ before do
+ project.pages_metadatum.update_attribute(:onboarding_complete, true)
+ end
+
+ it 'returns 200 status' do
+ expect(subject).to have_gitlab_http_status(:ok)
+ end
+
+ context 'when the project is in a subgroup' do
+ let(:group) { create(:group, :nested) }
+ let(:project) { create(:project, namespace: group) }
+
+ it 'returns a 200 status code' do
+ expect(subject).to have_gitlab_http_status(:ok)
+ end
+ end
+ end
+
+ context 'when pages is disabled' do
+ let(:project) { create(:project, :pages_disabled) }
+
+ it 'renders the disabled view' do
+ expect(subject).to render_template :disabled
+ end
+ end
+ end
+
describe 'DELETE destroy' do
it 'returns 302 status' do
delete :destroy, params: request_params
diff --git a/spec/controllers/projects/protected_branches_controller_spec.rb b/spec/controllers/projects/protected_branches_controller_spec.rb
index dcfccc00347..4996bd90005 100644
--- a/spec/controllers/projects/protected_branches_controller_spec.rb
+++ b/spec/controllers/projects/protected_branches_controller_spec.rb
@@ -3,14 +3,20 @@
require('spec_helper')
RSpec.describe Projects::ProtectedBranchesController do
- let(:project) { create(:project, :repository) }
+ let_it_be_with_reload(:project) { create(:project, :repository) }
+ let_it_be(:maintainer) { create(:user) }
+
let(:protected_branch) { create(:protected_branch, project: project) }
let(:project_params) { { namespace_id: project.namespace.to_param, project_id: project } }
let(:base_params) { project_params.merge(id: protected_branch.id) }
- let(:user) { create(:user) }
+ let(:user) { maintainer }
+
+ before_all do
+ project.add_maintainer(maintainer)
+ end
before do
- project.add_maintainer(user)
+ sign_in(user)
end
describe "GET #index" do
@@ -30,23 +36,16 @@ RSpec.describe Projects::ProtectedBranchesController do
let(:create_params) { attributes_for(:protected_branch).merge(access_level_params) }
- before do
- sign_in(user)
- end
-
it 'creates the protected branch rule' do
expect do
post(:create, params: project_params.merge(protected_branch: create_params))
end.to change(ProtectedBranch, :count).by(1)
end
- context 'when a policy restricts rule deletion' do
- before do
- policy = instance_double(ProtectedBranchPolicy, allowed?: false)
- allow(ProtectedBranchPolicy).to receive(:new).and_return(policy)
- end
-
+ context 'when a policy restricts rule creation' do
it "prevents creation of the protected branch rule" do
+ disallow(:create_protected_branch, an_instance_of(ProtectedBranch))
+
post(:create, params: project_params.merge(protected_branch: create_params))
expect(ProtectedBranch.count).to eq 0
@@ -57,10 +56,6 @@ RSpec.describe Projects::ProtectedBranchesController do
describe "PUT #update" do
let(:update_params) { { name: 'new_name' } }
- before do
- sign_in(user)
- end
-
it 'updates the protected branch rule' do
put(:update, params: base_params.merge(protected_branch: update_params))
@@ -68,13 +63,10 @@ RSpec.describe Projects::ProtectedBranchesController do
expect(json_response["name"]).to eq('new_name')
end
- context 'when a policy restricts rule deletion' do
- before do
- policy = instance_double(ProtectedBranchPolicy, allowed?: false)
- allow(ProtectedBranchPolicy).to receive(:new).and_return(policy)
- end
-
+ context 'when a policy restricts rule update' do
it "prevents update of the protected branch rule" do
+ disallow(:update_protected_branch, protected_branch)
+
old_name = protected_branch.name
put(:update, params: base_params.merge(protected_branch: update_params))
@@ -85,10 +77,6 @@ RSpec.describe Projects::ProtectedBranchesController do
end
describe "DELETE #destroy" do
- before do
- sign_in(user)
- end
-
it "deletes the protected branch rule" do
delete(:destroy, params: base_params)
@@ -96,16 +84,18 @@ RSpec.describe Projects::ProtectedBranchesController do
end
context 'when a policy restricts rule deletion' do
- before do
- policy = instance_double(ProtectedBranchPolicy, allowed?: false)
- allow(ProtectedBranchPolicy).to receive(:new).and_return(policy)
- end
-
it "prevents deletion of the protected branch rule" do
+ disallow(:destroy_protected_branch, protected_branch)
+
delete(:destroy, params: base_params)
expect(response).to have_gitlab_http_status(:forbidden)
end
end
end
+
+ def disallow(ability, protected_branch)
+ allow(Ability).to receive(:allowed?).and_call_original
+ allow(Ability).to receive(:allowed?).with(user, ability, protected_branch).and_return(false)
+ end
end
diff --git a/spec/controllers/projects/tags/releases_controller_spec.rb b/spec/controllers/projects/tags/releases_controller_spec.rb
deleted file mode 100644
index 1d2385f54f9..00000000000
--- a/spec/controllers/projects/tags/releases_controller_spec.rb
+++ /dev/null
@@ -1,103 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Projects::Tags::ReleasesController do
- let!(:project) { create(:project, :repository) }
- let!(:user) { create(:user) }
- let!(:release) { create(:release, project: project, tag: "v1.1.0") }
- let!(:tag) { release.tag }
-
- before do
- project.add_developer(user)
- sign_in(user)
- end
-
- describe 'GET #edit' do
- it 'initializes a new release' do
- tag_id = release.tag
- project.releases.destroy_all # rubocop: disable Cop/DestroyAll
-
- response = get :edit, params: { namespace_id: project.namespace, project_id: project, tag_id: tag_id }
-
- release = assigns(:release)
- expect(release).not_to be_nil
- expect(release).not_to be_persisted
- expect(response).to have_gitlab_http_status(:ok)
- end
-
- it 'retrieves an existing release' do
- response = get :edit, params: { namespace_id: project.namespace, project_id: project, tag_id: tag }
-
- release = assigns(:release)
- expect(release).not_to be_nil
- expect(release).to be_persisted
- expect(response).to have_gitlab_http_status(:ok)
- end
- end
-
- describe 'PUT #update' do
- it 'updates release note description' do
- response = update_release(release.tag, "description updated")
-
- release = project.releases.find_by(tag: tag)
- expect(release.description).to eq("description updated")
- expect(response).to have_gitlab_http_status(:found)
- end
-
- it 'creates a release if one does not exist' do
- tag_without_release = create_new_tag
-
- expect do
- update_release(tag_without_release.name, "a new release")
- end.to change { project.releases.count }.by(1)
-
- expect(response).to have_gitlab_http_status(:found)
- end
-
- it 'sets the release name, sha, and author for a new release' do
- tag_without_release = create_new_tag
-
- response = update_release(tag_without_release.name, "a new release")
-
- release = project.releases.find_by(tag: tag_without_release.name)
- expect(release.name).to eq(tag_without_release.name)
- expect(release.sha).to eq(tag_without_release.target_commit.sha)
- expect(release.author.id).to eq(user.id)
- expect(response).to have_gitlab_http_status(:found)
- end
-
- it 'does not delete release when description is empty' do
- expect do
- update_release(tag, "")
- end.not_to change { project.releases.count }
-
- expect(release.reload.description).to eq("")
-
- expect(response).to have_gitlab_http_status(:found)
- end
-
- it 'does nothing when description is empty and the tag does not have a release' do
- tag_without_release = create_new_tag
-
- expect do
- update_release(tag_without_release.name, "")
- end.not_to change { project.releases.count }
-
- expect(response).to have_gitlab_http_status(:found)
- end
- end
-
- def create_new_tag
- project.repository.add_tag(user, 'mytag', 'master')
- end
-
- def update_release(tag_id, description)
- put :update, params: {
- namespace_id: project.namespace.to_param,
- project_id: project,
- tag_id: tag_id,
- release: { description: description }
- }
- end
-end
diff --git a/spec/controllers/projects/uploads_controller_spec.rb b/spec/controllers/projects/uploads_controller_spec.rb
index 6d2db25ade2..01635f2e158 100644
--- a/spec/controllers/projects/uploads_controller_spec.rb
+++ b/spec/controllers/projects/uploads_controller_spec.rb
@@ -86,47 +86,27 @@ RSpec.describe Projects::UploadsController do
end
context "when not signed in" do
- context "enforce_auth_checks_on_uploads feature flag" do
- context "with flag enabled" do
- before do
- stub_feature_flags(enforce_auth_checks_on_uploads: true)
- end
-
- context 'when the project has setting enforce_auth_checks_on_uploads true' do
- before do
- model.update!(enforce_auth_checks_on_uploads: true)
- end
-
- it "responds with status 302" do
- show_upload
-
- expect(response).to have_gitlab_http_status(:redirect)
- end
- end
-
- context 'when the project has setting enforce_auth_checks_on_uploads false' do
- before do
- model.update!(enforce_auth_checks_on_uploads: false)
- end
+ context 'when the project has setting enforce_auth_checks_on_uploads true' do
+ before do
+ model.update!(enforce_auth_checks_on_uploads: true)
+ end
- it "responds with status 200" do
- show_upload
+ it "responds with status 302" do
+ show_upload
- expect(response).to have_gitlab_http_status(:ok)
- end
- end
+ expect(response).to have_gitlab_http_status(:redirect)
end
+ end
- context "with flag disabled" do
- before do
- stub_feature_flags(enforce_auth_checks_on_uploads: false)
- end
+ context 'when the project has setting enforce_auth_checks_on_uploads false' do
+ before do
+ model.update!(enforce_auth_checks_on_uploads: false)
+ end
- it "responds with status 200" do
- show_upload
+ it "responds with status 200" do
+ show_upload
- expect(response).to have_gitlab_http_status(:ok)
- end
+ expect(response).to have_gitlab_http_status(:ok)
end
end
end
@@ -137,41 +117,21 @@ RSpec.describe Projects::UploadsController do
end
context "when the user doesn't have access to the model" do
- context "enforce_auth_checks_on_uploads feature flag" do
- context "with flag enabled" do
- before do
- stub_feature_flags(enforce_auth_checks_on_uploads: true)
- end
-
- context 'when the project has setting enforce_auth_checks_on_uploads true' do
- before do
- model.update!(enforce_auth_checks_on_uploads: true)
- end
-
- it "responds with status 404" do
- show_upload
-
- expect(response).to have_gitlab_http_status(:not_found)
- end
- end
-
- context 'when the project has setting enforce_auth_checks_on_uploads false' do
- before do
- model.update!(enforce_auth_checks_on_uploads: false)
- end
-
- it "responds with status 200" do
- show_upload
-
- expect(response).to have_gitlab_http_status(:ok)
- end
- end
+ context 'when the project has setting enforce_auth_checks_on_uploads true' do
+ before do
+ model.update!(enforce_auth_checks_on_uploads: true)
+ end
+
+ it "responds with status 404" do
+ show_upload
+
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
- context "with flag disabled" do
+ context 'when the project has setting enforce_auth_checks_on_uploads false' do
before do
- stub_feature_flags(enforce_auth_checks_on_uploads: false)
+ model.update!(enforce_auth_checks_on_uploads: false)
end
it "responds with status 200" do
@@ -190,47 +150,27 @@ RSpec.describe Projects::UploadsController do
end
context "when not signed in" do
- context "enforce_auth_checks_on_uploads feature flag" do
- context "with flag enabled" do
- before do
- stub_feature_flags(enforce_auth_checks_on_uploads: true)
- end
-
- context 'when the project has setting enforce_auth_checks_on_uploads true' do
- before do
- model.update!(enforce_auth_checks_on_uploads: true)
- end
-
- it "responds with status 200" do
- show_upload
-
- expect(response).to have_gitlab_http_status(:ok)
- end
- end
+ context 'when the project has setting enforce_auth_checks_on_uploads true' do
+ before do
+ model.update!(enforce_auth_checks_on_uploads: true)
+ end
- context 'when the project has setting enforce_auth_checks_on_uploads false' do
- before do
- model.update!(enforce_auth_checks_on_uploads: false)
- end
+ it "responds with status 200" do
+ show_upload
- it "responds with status 200" do
- show_upload
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+ end
- expect(response).to have_gitlab_http_status(:ok)
- end
- end
+ context 'when the project has setting enforce_auth_checks_on_uploads false' do
+ before do
+ model.update!(enforce_auth_checks_on_uploads: false)
end
- context "with flag disabled" do
- before do
- stub_feature_flags(enforce_auth_checks_on_uploads: false)
- end
+ it "responds with status 200" do
+ show_upload
- it "responds with status 200" do
- show_upload
-
- expect(response).to have_gitlab_http_status(:ok)
- end
+ expect(response).to have_gitlab_http_status(:ok)
end
end
end
@@ -241,41 +181,21 @@ RSpec.describe Projects::UploadsController do
end
context "when the user doesn't have access to the model" do
- context "enforce_auth_checks_on_uploads feature flag" do
- context "with flag enabled" do
- before do
- stub_feature_flags(enforce_auth_checks_on_uploads: true)
- end
-
- context 'when the project has setting enforce_auth_checks_on_uploads true' do
- before do
- model.update!(enforce_auth_checks_on_uploads: true)
- end
-
- it "responds with status 200" do
- show_upload
-
- expect(response).to have_gitlab_http_status(:ok)
- end
- end
-
- context 'when the project has setting enforce_auth_checks_on_uploads false' do
- before do
- model.update!(enforce_auth_checks_on_uploads: false)
- end
-
- it "responds with status 200" do
- show_upload
-
- expect(response).to have_gitlab_http_status(:ok)
- end
- end
+ context 'when the project has setting enforce_auth_checks_on_uploads true' do
+ before do
+ model.update!(enforce_auth_checks_on_uploads: true)
+ end
+
+ it "responds with status 200" do
+ show_upload
+
+ expect(response).to have_gitlab_http_status(:ok)
end
end
- context "with flag disabled" do
+ context 'when the project has setting enforce_auth_checks_on_uploads false' do
before do
- stub_feature_flags(enforce_auth_checks_on_uploads: false)
+ model.update!(enforce_auth_checks_on_uploads: false)
end
it "responds with status 200" do
diff --git a/spec/controllers/projects_controller_spec.rb b/spec/controllers/projects_controller_spec.rb
index 34477a7bb68..94d75ab8d7d 100644
--- a/spec/controllers/projects_controller_spec.rb
+++ b/spec/controllers/projects_controller_spec.rb
@@ -878,30 +878,82 @@ RSpec.describe ProjectsController do
end
context 'with project feature attributes' do
- using RSpec::Parameterized::TableSyntax
+ let(:initial_value) { ProjectFeature::PRIVATE }
+ let(:update_to) { ProjectFeature::ENABLED }
- where(:feature, :initial_value, :update_to) do
- :metrics_dashboard_access_level | ProjectFeature::PRIVATE | ProjectFeature::ENABLED
- :container_registry_access_level | ProjectFeature::ENABLED | ProjectFeature::PRIVATE
+ before do
+ project.project_feature.update!(feature_access_level => initial_value)
end
- with_them do
- it "updates the project_feature new" do
- params = {
- namespace_id: project.namespace,
- id: project.path,
- project: {
- project_feature_attributes: {
- "#{feature}": update_to
- }
+ def update_project_feature
+ put :update, params: {
+ namespace_id: project.namespace,
+ id: project.path,
+ project: {
+ project_feature_attributes: {
+ feature_access_level.to_s => update_to
}
}
+ }
+ end
- expect { put :update, params: params }.to change {
- project.reload.project_feature.public_send(feature)
+ shared_examples 'feature update success' do
+ it 'updates access level successfully' do
+ expect { update_project_feature }.to change {
+ project.reload.project_feature.public_send(feature_access_level)
}.from(initial_value).to(update_to)
end
end
+
+ shared_examples 'feature update failure' do
+ it 'cannot update access level' do
+ expect { update_project_feature }.not_to change {
+ project.reload.project_feature.public_send(feature_access_level)
+ }
+ end
+ end
+
+ where(:feature_access_level) do
+ %i[
+ metrics_dashboard_access_level
+ container_registry_access_level
+ environments_access_level
+ feature_flags_access_level
+ releases_access_level
+ ]
+ end
+
+ with_them do
+ it_behaves_like 'feature update success'
+ end
+
+ context 'for feature_access_level operations_access_level' do
+ let(:feature_access_level) { :operations_access_level }
+
+ include_examples 'feature update failure'
+ end
+
+ context 'with feature flag split_operations_visibility_permissions disabled' do
+ before do
+ stub_feature_flags(split_operations_visibility_permissions: false)
+ end
+
+ context 'for feature_access_level operations_access_level' do
+ let(:feature_access_level) { :operations_access_level }
+
+ include_examples 'feature update success'
+ end
+
+ where(:feature_access_level) do
+ %i[
+ environments_access_level feature_flags_access_level
+ ]
+ end
+
+ with_them do
+ it_behaves_like 'feature update failure'
+ end
+ end
end
end
diff --git a/spec/controllers/registrations_controller_spec.rb b/spec/controllers/registrations_controller_spec.rb
index c5a97812d1f..70d4559edc1 100644
--- a/spec/controllers/registrations_controller_spec.rb
+++ b/spec/controllers/registrations_controller_spec.rb
@@ -25,7 +25,7 @@ RSpec.describe RegistrationsController do
end
let_it_be(:base_user_params) do
- { first_name: 'first', last_name: 'last', username: 'new_username', email: 'new@user.com', password: 'Any_password' }
+ { first_name: 'first', last_name: 'last', username: 'new_username', email: 'new@user.com', password: User.random_password }
end
let_it_be(:user_params) { { user: base_user_params } }
@@ -222,7 +222,7 @@ RSpec.describe RegistrationsController do
context 'when the registration fails' do
let_it_be(:member) { create(:project_member, :invited) }
let_it_be(:missing_user_params) do
- { username: '', email: member.invite_email, password: 'Any_password' }
+ { username: '', email: member.invite_email, password: User.random_password }
end
let_it_be(:user_params) { { user: missing_user_params } }
@@ -535,7 +535,7 @@ RSpec.describe RegistrationsController do
end
it 'succeeds if password is confirmed' do
- post :destroy, params: { password: '12345678' }
+ post :destroy, params: { password: user.password }
expect_success
end
@@ -576,7 +576,7 @@ RSpec.describe RegistrationsController do
end
it 'fails' do
- delete :destroy, params: { password: '12345678' }
+ delete :destroy, params: { password: user.password }
expect_failure(s_('Profiles|You must transfer ownership or delete groups you are an owner of before you can delete your account'))
end
diff --git a/spec/controllers/search_controller_spec.rb b/spec/controllers/search_controller_spec.rb
index b4d4e01e972..14b198dbefe 100644
--- a/spec/controllers/search_controller_spec.rb
+++ b/spec/controllers/search_controller_spec.rb
@@ -12,47 +12,6 @@ RSpec.describe SearchController do
sign_in(user)
end
- shared_examples_for 'when the user cannot read cross project' do |action, params|
- before do
- allow(Ability).to receive(:allowed?).and_call_original
- allow(Ability).to receive(:allowed?)
- .with(user, :read_cross_project, :global) { false }
- end
-
- it 'blocks access without a project_id' do
- get action, params: params
-
- expect(response).to have_gitlab_http_status(:forbidden)
- end
-
- it 'allows access with a project_id' do
- get action, params: params.merge(project_id: create(:project, :public).id)
-
- expect(response).to have_gitlab_http_status(:ok)
- end
- end
-
- shared_examples_for 'with external authorization service enabled' do |action, params|
- let(:project) { create(:project, namespace: user.namespace) }
- let(:note) { create(:note_on_issue, project: project) }
-
- before do
- enable_external_authorization_service_check
- end
-
- it 'renders a 403 when no project is given' do
- get action, params: params
-
- expect(response).to have_gitlab_http_status(:forbidden)
- end
-
- it 'renders a 200 when a project was set' do
- get action, params: params.merge(project_id: project.id)
-
- expect(response).to have_gitlab_http_status(:ok)
- end
- end
-
shared_examples_for 'support for active record query timeouts' do |action, params, method_to_stub, format|
before do
allow_next_instance_of(SearchService) do |service|
@@ -133,10 +92,11 @@ RSpec.describe SearchController do
{
chars_under_limit: (('a' * (term_char_limit - 1) + ' ') * (term_limit - 1))[0, char_limit],
chars_over_limit: (('a' * (term_char_limit - 1) + ' ') * (term_limit - 1))[0, char_limit + 1],
- terms_under_limit: ('abc ' * (term_limit - 1)),
+ terms_under_limit: ('abc ' * (term_limit - 1)),
terms_over_limit: ('abc ' * (term_limit + 1)),
term_length_over_limit: ('a' * (term_char_limit + 1)),
- term_length_under_limit: ('a' * (term_char_limit - 1))
+ term_length_under_limit: ('a' * (term_char_limit - 1)),
+ blank: ''
}
end
@@ -147,6 +107,7 @@ RSpec.describe SearchController do
:terms_over_limit | :set_terms_flash
:term_length_under_limit | :not_to_set_flash
:term_length_over_limit | :not_to_set_flash # abuse, so do nothing.
+ :blank | :not_to_set_flash
end
with_them do
@@ -393,6 +354,13 @@ RSpec.describe SearchController do
get(:autocomplete, params: { term: 'foo@bar.com', scope: 'users' })
end
end
+
+ it 'can be filtered with params[:filter]' do
+ get :autocomplete, params: { term: 'setting', filter: 'generic' }
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response.count).to eq(1)
+ expect(json_response.first['label']).to match(/User settings/)
+ end
end
describe '#append_info_to_payload' do
@@ -410,9 +378,20 @@ RSpec.describe SearchController do
expect(payload[:metadata]['meta.search.project_ids']).to eq(%w(456 789))
expect(payload[:metadata]['meta.search.type']).to eq('basic')
expect(payload[:metadata]['meta.search.level']).to eq('global')
+ expect(payload[:metadata]['meta.search.filters.language']).to eq('ruby')
end
- get :show, params: { scope: 'issues', search: 'hello world', group_id: '123', project_id: '456', project_ids: %w(456 789), confidential: true, state: true, force_search_results: true }
+ get :show, params: {
+ scope: 'issues',
+ search: 'hello world',
+ group_id: '123',
+ project_id: '456',
+ project_ids: %w(456 789),
+ confidential: true,
+ state: true,
+ force_search_results: true,
+ language: 'ruby'
+ }
end
it 'appends the default scope in meta.search.scope' do
diff --git a/spec/events/ci/pipeline_created_event_spec.rb b/spec/events/ci/pipeline_created_event_spec.rb
deleted file mode 100644
index 191c2e450dc..00000000000
--- a/spec/events/ci/pipeline_created_event_spec.rb
+++ /dev/null
@@ -1,27 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Ci::PipelineCreatedEvent do
- using RSpec::Parameterized::TableSyntax
-
- where(:data, :valid) do
- { pipeline_id: 1 } | true
- { pipeline_id: nil } | false
- { pipeline_id: "test" } | false
- {} | false
- { job_id: 1 } | false
- end
-
- with_them do
- let(:event) { described_class.new(data: data) }
-
- it 'validates the data according to the schema' do
- if valid
- expect { event }.not_to raise_error
- else
- expect { event }.to raise_error(Gitlab::EventStore::InvalidEvent)
- end
- end
- end
-end
diff --git a/spec/events/pages/page_deleted_event_spec.rb b/spec/events/pages/page_deleted_event_spec.rb
deleted file mode 100644
index 8fcd807eeb4..00000000000
--- a/spec/events/pages/page_deleted_event_spec.rb
+++ /dev/null
@@ -1,35 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Pages::PageDeletedEvent do
- where(:data, :valid) do
- [
- [{ project_id: 1, namespace_id: 2 }, true],
- [{ project_id: 1, namespace_id: 2, root_namespace_id: 3 }, true],
- [{ project_id: 1 }, false],
- [{ namespace_id: 1 }, false],
- [{ project_id: 'foo', namespace_id: 2 }, false],
- [{ project_id: 1, namespace_id: 'foo' }, false],
- [{ project_id: [], namespace_id: 2 }, false],
- [{ project_id: 1, namespace_id: [] }, false],
- [{ project_id: {}, namespace_id: 2 }, false],
- [{ project_id: 1, namespace_id: {} }, false],
- ['foo', false],
- [123, false],
- [[], false]
- ]
- end
-
- with_them do
- it 'validates data' do
- constructor = -> { described_class.new(data: data) }
-
- if valid
- expect { constructor.call }.not_to raise_error
- else
- expect { constructor.call }.to raise_error(Gitlab::EventStore::InvalidEvent)
- end
- end
- end
-end
diff --git a/spec/events/pages/page_deployed_event_spec.rb b/spec/events/pages/page_deployed_event_spec.rb
deleted file mode 100644
index 0c33a95b281..00000000000
--- a/spec/events/pages/page_deployed_event_spec.rb
+++ /dev/null
@@ -1,34 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Pages::PageDeployedEvent do
- where(:data, :valid) do
- [
- [{ project_id: 1, namespace_id: 2, root_namespace_id: 3 }, true],
- [{ project_id: 1 }, false],
- [{ namespace_id: 1 }, false],
- [{ project_id: 'foo', namespace_id: 2 }, false],
- [{ project_id: 1, namespace_id: 'foo' }, false],
- [{ project_id: [], namespace_id: 2 }, false],
- [{ project_id: 1, namespace_id: [] }, false],
- [{ project_id: {}, namespace_id: 2 }, false],
- [{ project_id: 1, namespace_id: {} }, false],
- ['foo', false],
- [123, false],
- [[], false]
- ]
- end
-
- with_them do
- it 'validates data' do
- constructor = -> { described_class.new(data: data) }
-
- if valid
- expect { constructor.call }.not_to raise_error
- else
- expect { constructor.call }.to raise_error(Gitlab::EventStore::InvalidEvent)
- end
- end
- end
-end
diff --git a/spec/events/projects/project_created_event_spec.rb b/spec/events/projects/project_created_event_spec.rb
deleted file mode 100644
index d70c737afb0..00000000000
--- a/spec/events/projects/project_created_event_spec.rb
+++ /dev/null
@@ -1,34 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Projects::ProjectCreatedEvent do
- where(:data, :valid) do
- [
- [{ project_id: 1, namespace_id: 2, root_namespace_id: 3 }, true],
- [{ project_id: 1 }, false],
- [{ namespace_id: 1 }, false],
- [{ project_id: 'foo', namespace_id: 2 }, false],
- [{ project_id: 1, namespace_id: 'foo' }, false],
- [{ project_id: [], namespace_id: 2 }, false],
- [{ project_id: 1, namespace_id: [] }, false],
- [{ project_id: {}, namespace_id: 2 }, false],
- [{ project_id: 1, namespace_id: {} }, false],
- ['foo', false],
- [123, false],
- [[], false]
- ]
- end
-
- with_them do
- it 'validates data' do
- constructor = -> { described_class.new(data: data) }
-
- if valid
- expect { constructor.call }.not_to raise_error
- else
- expect { constructor.call }.to raise_error(Gitlab::EventStore::InvalidEvent)
- end
- end
- end
-end
diff --git a/spec/events/projects/project_deleted_event_spec.rb b/spec/events/projects/project_deleted_event_spec.rb
deleted file mode 100644
index c3de2b22224..00000000000
--- a/spec/events/projects/project_deleted_event_spec.rb
+++ /dev/null
@@ -1,35 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Projects::ProjectDeletedEvent do
- where(:data, :valid) do
- [
- [{ project_id: 1, namespace_id: 2 }, true],
- [{ project_id: 1, namespace_id: 2, root_namespace_id: 3 }, true],
- [{ project_id: 1 }, false],
- [{ namespace_id: 1 }, false],
- [{ project_id: 'foo', namespace_id: 2 }, false],
- [{ project_id: 1, namespace_id: 'foo' }, false],
- [{ project_id: [], namespace_id: 2 }, false],
- [{ project_id: 1, namespace_id: [] }, false],
- [{ project_id: {}, namespace_id: 2 }, false],
- [{ project_id: 1, namespace_id: {} }, false],
- ['foo', false],
- [123, false],
- [[], false]
- ]
- end
-
- with_them do
- it 'validates data' do
- constructor = -> { described_class.new(data: data) }
-
- if valid
- expect { constructor.call }.not_to raise_error
- else
- expect { constructor.call }.to raise_error(Gitlab::EventStore::InvalidEvent)
- end
- end
- end
-end
diff --git a/spec/events/projects/project_path_changed_event_spec.rb b/spec/events/projects/project_path_changed_event_spec.rb
deleted file mode 100644
index a157428de04..00000000000
--- a/spec/events/projects/project_path_changed_event_spec.rb
+++ /dev/null
@@ -1,46 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Projects::ProjectPathChangedEvent do
- where(:data, :valid) do
- valid_event = {
- project_id: 1,
- namespace_id: 2,
- root_namespace_id: 3,
- old_path: 'old',
- new_path: 'new'
- }
-
- # All combinations of missing keys
- with_missing_keys = 0.upto(valid_event.size - 1)
- .flat_map { |size| valid_event.keys.combination(size).to_a }
- .map { |keys| [valid_event.slice(*keys), false] }
-
- [
- [valid_event, true],
- *with_missing_keys,
- [{ project_id: 'foo', namespace_id: 2 }, false],
- [{ project_id: 1, namespace_id: 'foo' }, false],
- [{ project_id: [], namespace_id: 2 }, false],
- [{ project_id: 1, namespace_id: [] }, false],
- [{ project_id: {}, namespace_id: 2 }, false],
- [{ project_id: 1, namespace_id: {} }, false],
- ['foo', false],
- [123, false],
- [[], false]
- ]
- end
-
- with_them do
- it 'validates data' do
- constructor = -> { described_class.new(data: data) }
-
- if valid
- expect { constructor.call }.not_to raise_error
- else
- expect { constructor.call }.to raise_error(Gitlab::EventStore::InvalidEvent)
- end
- end
- end
-end
diff --git a/spec/factories/ci/builds.rb b/spec/factories/ci/builds.rb
index 97ddbf21b99..d684f79a518 100644
--- a/spec/factories/ci/builds.rb
+++ b/spec/factories/ci/builds.rb
@@ -504,6 +504,20 @@ FactoryBot.define do
artifacts_expire_at { 1.minute.ago }
end
+ trait :with_artifacts_paths do
+ options do
+ {
+ artifacts: {
+ name: 'artifacts_file',
+ untracked: false,
+ paths: ['out/'],
+ when: 'always',
+ expire_in: '7d'
+ }
+ }
+ end
+ end
+
trait :with_commit do
after(:build) do |build|
commit = build(:commit, :without_author)
@@ -645,6 +659,19 @@ FactoryBot.define do
end
end
+ trait :multiple_report_artifacts do
+ options do
+ {
+ artifacts: {
+ reports: {
+ sast: 'gl-sast-report.json',
+ container_scanning: 'gl-container-scanning-report.json'
+ }
+ }
+ }
+ end
+ end
+
trait :non_public_artifacts do
options do
{
diff --git a/spec/factories/ci/job_artifacts.rb b/spec/factories/ci/job_artifacts.rb
index cdbcdced5f4..114ad3a5847 100644
--- a/spec/factories/ci/job_artifacts.rb
+++ b/spec/factories/ci/job_artifacts.rb
@@ -102,6 +102,28 @@ FactoryBot.define do
end
end
+ trait :zip_with_single_file do
+ file_type { :archive }
+ file_format { :zip }
+
+ after(:build) do |artifact, evaluator|
+ artifact.file = fixture_file_upload(
+ Rails.root.join('spec/fixtures/lib/gitlab/ci/build/artifacts/adapters/zip_stream/single_file.zip'),
+ 'application/zip')
+ end
+ end
+
+ trait :zip_with_multiple_files do
+ file_type { :archive }
+ file_format { :zip }
+
+ after(:build) do |artifact, evaluator|
+ artifact.file = fixture_file_upload(
+ Rails.root.join('spec/fixtures/lib/gitlab/ci/build/artifacts/adapters/zip_stream/multiple_files.zip'),
+ 'application/zip')
+ end
+ end
+
trait :junit do
file_type { :junit }
file_format { :gzip }
diff --git a/spec/factories/ci/runners.rb b/spec/factories/ci/runners.rb
index 18026412261..4758986b47c 100644
--- a/spec/factories/ci/runners.rb
+++ b/spec/factories/ci/runners.rb
@@ -18,11 +18,11 @@ FactoryBot.define do
after(:build) do |runner, evaluator|
evaluator.projects.each do |proj|
- runner.runner_projects << build(:ci_runner_project, project: proj)
+ runner.runner_projects << build(:ci_runner_project, runner: runner, project: proj)
end
evaluator.groups.each do |group|
- runner.runner_namespaces << build(:ci_runner_namespace, namespace: group)
+ runner.runner_namespaces << build(:ci_runner_namespace, runner: runner, namespace: group)
end
end
diff --git a/spec/factories/ci/secure_files.rb b/spec/factories/ci/secure_files.rb
index 9afec5db858..74988202c71 100644
--- a/spec/factories/ci/secure_files.rb
+++ b/spec/factories/ci/secure_files.rb
@@ -6,5 +6,11 @@ FactoryBot.define do
file { fixture_file_upload('spec/fixtures/ci_secure_files/upload-keystore.jks', 'application/octet-stream') }
checksum { 'foo1234' }
project
+
+ trait :remote_store do
+ after(:create) do |ci_secure_file|
+ ci_secure_file.update!(file_store: ObjectStorage::Store::REMOTE)
+ end
+ end
end
end
diff --git a/spec/factories/ci/variables.rb b/spec/factories/ci/variables.rb
index a4cbf873b0b..1f9c12ecbce 100644
--- a/spec/factories/ci/variables.rb
+++ b/spec/factories/ci/variables.rb
@@ -5,6 +5,7 @@ FactoryBot.define do
sequence(:key) { |n| "VARIABLE_#{n}" }
value { 'VARIABLE_VALUE' }
masked { false }
+ variable_type { :env_var }
trait(:protected) do
add_attribute(:protected) { true }
diff --git a/spec/factories/gitlab/database/async_indexes/postgres_async_index.rb b/spec/factories/gitlab/database/async_indexes/postgres_async_index.rb
index d6b4b90bbd0..e3f366c17eb 100644
--- a/spec/factories/gitlab/database/async_indexes/postgres_async_index.rb
+++ b/spec/factories/gitlab/database/async_indexes/postgres_async_index.rb
@@ -5,5 +5,9 @@ FactoryBot.define do
sequence(:name) { |n| "users_id_#{n}" }
definition { "CREATE INDEX #{name} ON #{table_name} (id)" }
table_name { "users" }
+
+ trait :with_drop do
+ definition { "DROP INDEX #{name}" }
+ end
end
end
diff --git a/spec/factories/member_roles.rb b/spec/factories/member_roles.rb
new file mode 100644
index 00000000000..bd211844f5a
--- /dev/null
+++ b/spec/factories/member_roles.rb
@@ -0,0 +1,8 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :member_role do
+ namespace { association(:group) }
+ base_access_level { Gitlab::Access::DEVELOPER }
+ end
+end
diff --git a/spec/factories/projects.rb b/spec/factories/projects.rb
index d60512e2b2a..95b72648cf5 100644
--- a/spec/factories/projects.rb
+++ b/spec/factories/projects.rb
@@ -37,6 +37,9 @@ FactoryBot.define do
operations_access_level { ProjectFeature::ENABLED }
container_registry_access_level { ProjectFeature::ENABLED }
security_and_compliance_access_level { ProjectFeature::PRIVATE }
+ environments_access_level { ProjectFeature::ENABLED }
+ feature_flags_access_level { ProjectFeature::ENABLED }
+ releases_access_level { ProjectFeature::ENABLED }
# we can't assign the delegated `#ci_cd_settings` attributes directly, as the
# `#ci_cd_settings` relation needs to be created first
@@ -404,6 +407,13 @@ FactoryBot.define do
end
end
+ trait :pages_published do
+ after(:create) do |project|
+ project.mark_pages_onboarding_complete
+ project.mark_pages_as_deployed
+ end
+ end
+
trait :service_desk_disabled do
service_desk_enabled { nil }
end
diff --git a/spec/factories/users.rb b/spec/factories/users.rb
index 70b0af8a36c..2e7c6116fe6 100644
--- a/spec/factories/users.rb
+++ b/spec/factories/users.rb
@@ -5,7 +5,7 @@ FactoryBot.define do
email { generate(:email) }
name { generate(:name) }
username { generate(:username) }
- password { "12345678" }
+ password { User.random_password }
role { 'software_developer' }
confirmed_at { Time.now }
confirmation_token { nil }
diff --git a/spec/factories/users/project_user_callouts.rb b/spec/factories/users/project_user_callouts.rb
new file mode 100644
index 00000000000..50e85315bb9
--- /dev/null
+++ b/spec/factories/users/project_user_callouts.rb
@@ -0,0 +1,10 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :project_callout, class: 'Users::ProjectCallout' do
+ feature_name { :awaiting_members_banner }
+
+ user
+ project
+ end
+end
diff --git a/spec/factories/work_items.rb b/spec/factories/work_items.rb
index 81c9fb6ed87..267ea9710b3 100644
--- a/spec/factories/work_items.rb
+++ b/spec/factories/work_items.rb
@@ -10,6 +10,10 @@ FactoryBot.define do
issue_type { :issue }
association :work_item_type, :default
+ trait :confidential do
+ confidential { true }
+ end
+
trait :task do
issue_type { :task }
association :work_item_type, :default, :task
diff --git a/spec/features/admin/admin_appearance_spec.rb b/spec/features/admin/admin_appearance_spec.rb
index 8bf8ef56353..b297d92b2fa 100644
--- a/spec/features/admin/admin_appearance_spec.rb
+++ b/spec/features/admin/admin_appearance_spec.rb
@@ -6,161 +6,168 @@ RSpec.describe 'Admin Appearance' do
let!(:appearance) { create(:appearance) }
let(:admin) { create(:admin) }
- it 'create new appearance' do
- sign_in(admin)
- gitlab_enable_admin_mode_sign_in(admin)
- visit admin_application_settings_appearances_path
-
- fill_in 'appearance_title', with: 'MyCompany'
- fill_in 'appearance_description', with: 'dev server'
- fill_in 'appearance_new_project_guidelines', with: 'Custom project guidelines'
- fill_in 'appearance_profile_image_guidelines', with: 'Custom profile image guidelines'
- click_button 'Update appearance settings'
-
- expect(page).to have_current_path admin_application_settings_appearances_path, ignore_query: true
- expect(page).to have_content 'Appearance'
-
- expect(page).to have_field('appearance_title', with: 'MyCompany')
- expect(page).to have_field('appearance_description', with: 'dev server')
- expect(page).to have_field('appearance_new_project_guidelines', with: 'Custom project guidelines')
- expect(page).to have_field('appearance_profile_image_guidelines', with: 'Custom profile image guidelines')
- expect(page).to have_content 'Last edit'
- end
+ flag_values = [true, false]
+ flag_values.each do |val|
+ before do
+ stub_feature_flags(restyle_login_page: val)
+ end
- it 'preview sign-in page appearance' do
- sign_in(admin)
- gitlab_enable_admin_mode_sign_in(admin)
+ it 'create new appearance' do
+ sign_in(admin)
+ gitlab_enable_admin_mode_sign_in(admin)
+ visit admin_application_settings_appearances_path
- visit admin_application_settings_appearances_path
- click_link "Sign-in page"
+ fill_in 'appearance_title', with: 'MyCompany'
+ fill_in 'appearance_description', with: 'dev server'
+ fill_in 'appearance_new_project_guidelines', with: 'Custom project guidelines'
+ fill_in 'appearance_profile_image_guidelines', with: 'Custom profile image guidelines'
+ click_button 'Update appearance settings'
- expect(find('#login')).to be_disabled
- expect(find('#password')).to be_disabled
- expect(find('button')).to be_disabled
+ expect(page).to have_current_path admin_application_settings_appearances_path, ignore_query: true
+ expect(page).to have_content 'Appearance'
- expect_custom_sign_in_appearance(appearance)
- end
+ expect(page).to have_field('appearance_title', with: 'MyCompany')
+ expect(page).to have_field('appearance_description', with: 'dev server')
+ expect(page).to have_field('appearance_new_project_guidelines', with: 'Custom project guidelines')
+ expect(page).to have_field('appearance_profile_image_guidelines', with: 'Custom profile image guidelines')
+ expect(page).to have_content 'Last edit'
+ end
- it 'preview new project page appearance', :js do
- sign_in(admin)
- gitlab_enable_admin_mode_sign_in(admin)
+ it 'preview sign-in page appearance' do
+ sign_in(admin)
+ gitlab_enable_admin_mode_sign_in(admin)
- visit admin_application_settings_appearances_path
- click_link "New project page"
+ visit admin_application_settings_appearances_path
+ click_link "Sign-in page"
- expect_custom_new_project_appearance(appearance)
- end
+ expect(find('#login')).to be_disabled
+ expect(find('#password')).to be_disabled
+ expect(find('button')).to be_disabled
- context 'Custom system header and footer' do
- before do
+ expect_custom_sign_in_appearance(appearance)
+ end
+
+ it 'preview new project page appearance', :js do
sign_in(admin)
gitlab_enable_admin_mode_sign_in(admin)
- end
- context 'when system header and footer messages are empty' do
- it 'shows custom system header and footer fields' do
- visit admin_application_settings_appearances_path
+ visit admin_application_settings_appearances_path
+ click_link "New project page"
- expect(page).to have_field('appearance_header_message', with: '')
- expect(page).to have_field('appearance_footer_message', with: '')
- expect(page).to have_field('appearance_message_background_color')
- expect(page).to have_field('appearance_message_font_color')
- end
+ expect_custom_new_project_appearance(appearance)
end
- context 'when system header and footer messages are not empty' do
+ context 'Custom system header and footer' do
before do
- appearance.update!(header_message: 'Foo', footer_message: 'Bar')
+ sign_in(admin)
+ gitlab_enable_admin_mode_sign_in(admin)
end
- it 'shows custom system header and footer fields' do
- visit admin_application_settings_appearances_path
+ context 'when system header and footer messages are empty' do
+ it 'shows custom system header and footer fields' do
+ visit admin_application_settings_appearances_path
- expect(page).to have_field('appearance_header_message', with: appearance.header_message)
- expect(page).to have_field('appearance_footer_message', with: appearance.footer_message)
- expect(page).to have_field('appearance_message_background_color')
- expect(page).to have_field('appearance_message_font_color')
+ expect(page).to have_field('appearance_header_message', with: '')
+ expect(page).to have_field('appearance_footer_message', with: '')
+ expect(page).to have_field('appearance_message_background_color')
+ expect(page).to have_field('appearance_message_font_color')
+ end
end
- end
- end
- it 'custom sign-in page' do
- visit new_user_session_path
+ context 'when system header and footer messages are not empty' do
+ before do
+ appearance.update!(header_message: 'Foo', footer_message: 'Bar')
+ end
- expect_custom_sign_in_appearance(appearance)
- end
+ it 'shows custom system header and footer fields' do
+ visit admin_application_settings_appearances_path
- it 'custom new project page', :js do
- sign_in(admin)
- gitlab_enable_admin_mode_sign_in(admin)
- visit new_project_path
- click_link 'Create blank project'
+ expect(page).to have_field('appearance_header_message', with: appearance.header_message)
+ expect(page).to have_field('appearance_footer_message', with: appearance.footer_message)
+ expect(page).to have_field('appearance_message_background_color')
+ expect(page).to have_field('appearance_message_font_color')
+ end
+ end
+ end
- expect_custom_new_project_appearance(appearance)
- end
+ it 'custom sign-in page' do
+ visit new_user_session_path
- context 'Profile page with custom profile image guidelines' do
- before do
- sign_in(create(:admin))
+ expect_custom_sign_in_appearance(appearance)
+ end
+
+ it 'custom new project page', :js do
+ sign_in(admin)
gitlab_enable_admin_mode_sign_in(admin)
- visit admin_application_settings_appearances_path
- fill_in 'appearance_profile_image_guidelines', with: 'Custom profile image guidelines, please :smile:!'
- click_button 'Update appearance settings'
+ visit new_project_path
+ click_link 'Create blank project'
+
+ expect_custom_new_project_appearance(appearance)
end
- it 'renders guidelines when set' do
- sign_in create(:user)
- visit profile_path
+ context 'Profile page with custom profile image guidelines' do
+ before do
+ sign_in(admin)
+ gitlab_enable_admin_mode_sign_in(admin)
+ visit admin_application_settings_appearances_path
+ fill_in 'appearance_profile_image_guidelines', with: 'Custom profile image guidelines, please :smile:!'
+ click_button 'Update appearance settings'
+ end
- expect(page).to have_content 'Custom profile image guidelines, please 😄!'
+ it 'renders guidelines when set' do
+ sign_in create(:user)
+ visit profile_path
+
+ expect(page).to have_content 'Custom profile image guidelines, please 😄!'
+ end
end
- end
- it 'appearance logo' do
- sign_in(admin)
- gitlab_enable_admin_mode_sign_in(admin)
- visit admin_application_settings_appearances_path
+ it 'appearance logo' do
+ sign_in(admin)
+ gitlab_enable_admin_mode_sign_in(admin)
+ visit admin_application_settings_appearances_path
- attach_file(:appearance_logo, logo_fixture)
- click_button 'Update appearance settings'
- expect(page).to have_css(logo_selector)
+ attach_file(:appearance_logo, logo_fixture)
+ click_button 'Update appearance settings'
+ expect(page).to have_css(logo_selector)
- click_link 'Remove logo'
- expect(page).not_to have_css(logo_selector)
- end
+ click_link 'Remove logo'
+ expect(page).not_to have_css(logo_selector)
+ end
- it 'header logos' do
- sign_in(admin)
- gitlab_enable_admin_mode_sign_in(admin)
- visit admin_application_settings_appearances_path
+ it 'header logos' do
+ sign_in(admin)
+ gitlab_enable_admin_mode_sign_in(admin)
+ visit admin_application_settings_appearances_path
- attach_file(:appearance_header_logo, logo_fixture)
- click_button 'Update appearance settings'
- expect(page).to have_css(header_logo_selector)
+ attach_file(:appearance_header_logo, logo_fixture)
+ click_button 'Update appearance settings'
+ expect(page).to have_css(header_logo_selector)
- click_link 'Remove header logo'
- expect(page).not_to have_css(header_logo_selector)
- end
+ click_link 'Remove header logo'
+ expect(page).not_to have_css(header_logo_selector)
+ end
- it 'Favicon' do
- sign_in(admin)
- gitlab_enable_admin_mode_sign_in(admin)
- visit admin_application_settings_appearances_path
+ it 'Favicon' do
+ sign_in(admin)
+ gitlab_enable_admin_mode_sign_in(admin)
+ visit admin_application_settings_appearances_path
- attach_file(:appearance_favicon, logo_fixture)
- click_button 'Update appearance settings'
+ attach_file(:appearance_favicon, logo_fixture)
+ click_button 'Update appearance settings'
- expect(page).to have_css('.appearance-light-logo-preview')
+ expect(page).to have_css('.appearance-light-logo-preview')
- click_link 'Remove favicon'
+ click_link 'Remove favicon'
- expect(page).not_to have_css('.appearance-light-logo-preview')
+ expect(page).not_to have_css('.appearance-light-logo-preview')
- # allowed file types
- attach_file(:appearance_favicon, Rails.root.join('spec', 'fixtures', 'sanitized.svg'))
- click_button 'Update appearance settings'
+ # allowed file types
+ attach_file(:appearance_favicon, Rails.root.join('spec', 'fixtures', 'sanitized.svg'))
+ click_button 'Update appearance settings'
- expect(page).to have_content 'Favicon You are not allowed to upload "svg" files, allowed types: png, ico'
+ expect(page).to have_content 'Favicon You are not allowed to upload "svg" files, allowed types: png, ico'
+ end
end
def expect_custom_sign_in_appearance(appearance)
diff --git a/spec/features/admin/admin_mode/login_spec.rb b/spec/features/admin/admin_mode/login_spec.rb
index 659f66a67d2..6b4c9adb096 100644
--- a/spec/features/admin/admin_mode/login_spec.rb
+++ b/spec/features/admin/admin_mode/login_spec.rb
@@ -13,248 +13,252 @@ RSpec.describe 'Admin Mode Login' do
click_button 'Verify code'
end
- context 'with valid username/password' do
- let(:user) { create(:admin, :two_factor) }
-
- context 'using one-time code' do
- it 'blocks login if we reuse the same code immediately' do
- gitlab_sign_in(user, remember: true)
-
- expect(page).to have_content('Two-Factor Authentication')
-
- repeated_otp = user.current_otp
- enter_code(repeated_otp)
- gitlab_enable_admin_mode_sign_in(user)
-
- expect(page).to have_content('Two-Factor Authentication')
-
- enter_code(repeated_otp)
-
- expect(page).to have_current_path admin_session_path, ignore_query: true
- expect(page).to have_content('Invalid two-factor code')
- end
+ flag_values = [true, false]
+ flag_values.each do |val|
+ before do
+ stub_feature_flags(restyle_login_page: val)
+ end
+ context 'with valid username/password' do
+ let(:user) { create(:admin, :two_factor) }
- context 'not re-using codes' do
- before do
+ context 'using one-time code' do
+ it 'blocks login if we reuse the same code immediately' do
gitlab_sign_in(user, remember: true)
expect(page).to have_content('Two-Factor Authentication')
- enter_code(user.current_otp)
+ repeated_otp = user.current_otp
+ enter_code(repeated_otp)
gitlab_enable_admin_mode_sign_in(user)
expect(page).to have_content('Two-Factor Authentication')
- end
- it 'allows login with valid code' do
- # Cannot reuse the TOTP
- travel_to(30.seconds.from_now) do
- enter_code(user.current_otp)
+ enter_code(repeated_otp)
- expect(page).to have_current_path admin_root_path, ignore_query: true
- expect(page).to have_content('Admin mode enabled')
- end
- end
-
- it 'blocks login with invalid code' do
- # Cannot reuse the TOTP
- travel_to(30.seconds.from_now) do
- enter_code('foo')
-
- expect(page).to have_content('Invalid two-factor code')
- end
+ expect(page).to have_current_path admin_session_path, ignore_query: true
+ expect(page).to have_content('Invalid two-factor code')
end
- it 'allows login with invalid code, then valid code' do
- # Cannot reuse the TOTP
- travel_to(30.seconds.from_now) do
- enter_code('foo')
+ context 'not re-using codes' do
+ before do
+ gitlab_sign_in(user, remember: true)
- expect(page).to have_content('Invalid two-factor code')
+ expect(page).to have_content('Two-factor authentication code')
enter_code(user.current_otp)
+ gitlab_enable_admin_mode_sign_in(user)
- expect(page).to have_current_path admin_root_path, ignore_query: true
- expect(page).to have_content('Admin mode enabled')
+ expect(page).to have_content('Two-Factor Authentication')
end
- end
- context 'using backup code' do
- let(:codes) { user.generate_otp_backup_codes! }
+ it 'allows login with valid code' do
+ # Cannot reuse the TOTP
+ travel_to(30.seconds.from_now) do
+ enter_code(user.current_otp)
- before do
- expect(codes.size).to eq 10
+ expect(page).to have_current_path admin_root_path, ignore_query: true
+ expect(page).to have_content('Admin mode enabled')
+ end
+ end
+
+ it 'blocks login with invalid code' do
+ # Cannot reuse the TOTP
+ travel_to(30.seconds.from_now) do
+ enter_code('foo')
- # Ensure the generated codes get saved
- user.save!
+ expect(page).to have_content('Invalid two-factor code')
+ end
end
- context 'with valid code' do
- it 'allows login' do
- enter_code(codes.sample)
+ it 'allows login with invalid code, then valid code' do
+ # Cannot reuse the TOTP
+ travel_to(30.seconds.from_now) do
+ enter_code('foo')
+
+ expect(page).to have_content('Invalid two-factor code')
+
+ enter_code(user.current_otp)
expect(page).to have_current_path admin_root_path, ignore_query: true
expect(page).to have_content('Admin mode enabled')
end
-
- it 'invalidates the used code' do
- expect { enter_code(codes.sample) }
- .to change { user.reload.otp_backup_codes.size }.by(-1)
- end
end
- context 'with invalid code' do
- it 'blocks login' do
- code = codes.sample
- expect(user.invalidate_otp_backup_code!(code)).to eq true
+ context 'using backup code' do
+ let(:codes) { user.generate_otp_backup_codes! }
+
+ before do
+ expect(codes.size).to eq 10
+ # Ensure the generated codes get saved
user.save!
- expect(user.reload.otp_backup_codes.size).to eq 9
+ end
+
+ context 'with valid code' do
+ it 'allows login' do
+ enter_code(codes.sample)
- enter_code(code)
+ expect(page).to have_current_path admin_root_path, ignore_query: true
+ expect(page).to have_content('Admin mode enabled')
+ end
- expect(page).to have_content('Invalid two-factor code.')
+ it 'invalidates the used code' do
+ expect { enter_code(codes.sample) }
+ .to change { user.reload.otp_backup_codes.size }.by(-1)
+ end
end
- end
- end
- end
- end
- context 'when logging in via omniauth' do
- let(:user) { create(:omniauth_user, :admin, :two_factor, extern_uid: 'my-uid', provider: 'saml', password_automatically_set: false)}
- let(:mock_saml_response) do
- File.read('spec/fixtures/authentication/saml_response.xml')
- end
+ context 'with invalid code' do
+ it 'blocks login' do
+ code = codes.sample
+ expect(user.invalidate_otp_backup_code!(code)).to eq true
+
+ user.save!
+ expect(user.reload.otp_backup_codes.size).to eq 9
+
+ enter_code(code)
- before do
- stub_omniauth_saml_config(enabled: true, auto_link_saml_user: true, allow_single_sign_on: ['saml'],
- providers: [mock_saml_config_with_upstream_two_factor_authn_contexts])
+ expect(page).to have_content('Invalid two-factor code.')
+ end
+ end
+ end
+ end
end
- context 'when authn_context is worth two factors' do
+ context 'when logging in via omniauth' do
+ let(:user) { create(:omniauth_user, :admin, :two_factor, extern_uid: 'my-uid', provider: 'saml', password_automatically_set: false) }
let(:mock_saml_response) do
File.read('spec/fixtures/authentication/saml_response.xml')
- .gsub('urn:oasis:names:tc:SAML:2.0:ac:classes:Password',
- 'urn:oasis:names:tc:SAML:2.0:ac:classes:SecondFactorOTPSMS')
end
- it 'signs user in without prompting for second factor' do
- sign_in_using_saml!
-
- expect(page).not_to have_content('Two-Factor Authentication')
-
- enable_admin_mode_using_saml!
-
- expect(page).not_to have_content('Two-Factor Authentication')
- expect(page).to have_current_path admin_root_path, ignore_query: true
- expect(page).to have_content('Admin mode enabled')
+ before do
+ stub_omniauth_saml_config(enabled: true, auto_link_saml_user: true, allow_single_sign_on: ['saml'], providers: [mock_saml_config_with_upstream_two_factor_authn_contexts])
end
- end
- context 'when two factor authentication is required' do
- it 'shows 2FA prompt after omniauth login' do
- sign_in_using_saml!
-
- expect(page).to have_content('Two-Factor Authentication')
- enter_code(user.current_otp)
+ context 'when authn_context is worth two factors' do
+ let(:mock_saml_response) do
+ File.read('spec/fixtures/authentication/saml_response.xml')
+ .gsub('urn:oasis:names:tc:SAML:2.0:ac:classes:Password',
+ 'urn:oasis:names:tc:SAML:2.0:ac:classes:SecondFactorOTPSMS')
+ end
- enable_admin_mode_using_saml!
+ it 'signs user in without prompting for second factor' do
+ sign_in_using_saml!
- expect(page).to have_content('Two-Factor Authentication')
+ expect(page).not_to have_content('Two-Factor Authentication')
- # Cannot reuse the TOTP
- travel_to(30.seconds.from_now) do
- enter_code(user.current_otp)
+ enable_admin_mode_using_saml!
+ expect(page).not_to have_content('Two-Factor Authentication')
expect(page).to have_current_path admin_root_path, ignore_query: true
expect(page).to have_content('Admin mode enabled')
end
end
- end
- def sign_in_using_saml!
- gitlab_sign_in_via('saml', user, 'my-uid', mock_saml_response)
- end
+ context 'when two factor authentication is required' do
+ it 'shows 2FA prompt after omniauth login' do
+ sign_in_using_saml!
- def enable_admin_mode_using_saml!
- gitlab_enable_admin_mode_sign_in_via('saml', user, 'my-uid', mock_saml_response)
- end
- end
+ expect(page).to have_content('Two-Factor Authentication')
+ enter_code(user.current_otp)
- context 'when logging in via ldap' do
- let(:uid) { 'my-uid' }
- let(:provider_label) { 'Main LDAP' }
- let(:provider_name) { 'main' }
- let(:provider) { "ldap#{provider_name}" }
- let(:ldap_server_config) do
- {
- 'label' => provider_label,
- 'provider_name' => provider,
- 'attributes' => {},
- 'encryption' => 'plain',
- 'uid' => 'uid',
- 'base' => 'dc=example,dc=com'
- }
- end
+ enable_admin_mode_using_saml!
+
+ expect(page).to have_content('Two-Factor Authentication')
- let(:user) { create(:omniauth_user, :admin, :two_factor, extern_uid: uid, provider: provider) }
+ # Cannot reuse the TOTP
+ travel_to(30.seconds.from_now) do
+ enter_code(user.current_otp)
- before do
- setup_ldap(provider, user, uid, ldap_server_config)
+ expect(page).to have_current_path admin_root_path, ignore_query: true
+ expect(page).to have_content('Admin mode enabled')
+ end
+ end
+ end
+
+ def sign_in_using_saml!
+ gitlab_sign_in_via('saml', user, 'my-uid', mock_saml_response)
+ end
+
+ def enable_admin_mode_using_saml!
+ gitlab_enable_admin_mode_sign_in_via('saml', user, 'my-uid', mock_saml_response)
+ end
end
- context 'when two factor authentication is required' do
- it 'shows 2FA prompt after ldap login' do
- sign_in_using_ldap!(user, provider_label)
+ context 'when logging in via ldap' do
+ let(:uid) { 'my-uid' }
+ let(:provider_label) { 'Main LDAP' }
+ let(:provider_name) { 'main' }
+ let(:provider) { "ldap#{provider_name}" }
+ let(:ldap_server_config) do
+ {
+ 'label' => provider_label,
+ 'provider_name' => provider,
+ 'attributes' => {},
+ 'encryption' => 'plain',
+ 'uid' => 'uid',
+ 'base' => 'dc=example,dc=com'
+ }
+ end
- expect(page).to have_content('Two-Factor Authentication')
+ let(:user) { create(:omniauth_user, :admin, :two_factor, extern_uid: uid, provider: provider) }
- enter_code(user.current_otp)
- enable_admin_mode_using_ldap!(user)
+ before do
+ setup_ldap(provider, user, uid, ldap_server_config)
+ end
- expect(page).to have_content('Two-Factor Authentication')
+ context 'when two factor authentication is required' do
+ it 'shows 2FA prompt after ldap login' do
+ sign_in_using_ldap!(user, provider_label)
+ expect(page).to have_content('Two-Factor Authentication')
- # Cannot reuse the TOTP
- travel_to(30.seconds.from_now) do
enter_code(user.current_otp)
+ enable_admin_mode_using_ldap!(user)
- expect(page).to have_current_path admin_root_path, ignore_query: true
- expect(page).to have_content('Admin mode enabled')
+ expect(page).to have_content('Two-Factor Authentication')
+
+ # Cannot reuse the TOTP
+ travel_to(30.seconds.from_now) do
+ enter_code(user.current_otp)
+
+ expect(page).to have_current_path admin_root_path, ignore_query: true
+ expect(page).to have_content('Admin mode enabled')
+ end
end
end
- end
- def setup_ldap(provider, user, uid, ldap_server_config)
- stub_ldap_setting(enabled: true)
+ def setup_ldap(provider, user, uid, ldap_server_config)
+ stub_ldap_setting(enabled: true)
- allow(::Gitlab::Auth::Ldap::Config).to receive_messages(enabled: true, servers: [ldap_server_config])
- allow(Gitlab::Auth::OAuth::Provider).to receive_messages(providers: [provider.to_sym])
+ allow(::Gitlab::Auth::Ldap::Config).to receive_messages(enabled: true, servers: [ldap_server_config])
+ allow(Gitlab::Auth::OAuth::Provider).to receive_messages(providers: [provider.to_sym])
- Ldap::OmniauthCallbacksController.define_providers!
- Rails.application.reload_routes!
+ Ldap::OmniauthCallbacksController.define_providers!
+ Rails.application.reload_routes!
- mock_auth_hash(provider, uid, user.email)
- allow(Gitlab::Auth::Ldap::Access).to receive(:allowed?).with(user).and_return(true)
+ mock_auth_hash(provider, uid, user.email)
+ allow(Gitlab::Auth::Ldap::Access).to receive(:allowed?).with(user).and_return(true)
- allow_any_instance_of(ActionDispatch::Routing::RoutesProxy)
- .to receive(:"user_#{provider}_omniauth_callback_path")
- .and_return("/users/auth/#{provider}/callback")
- end
+ allow_any_instance_of(ActionDispatch::Routing::RoutesProxy)
+ .to receive(:"user_#{provider}_omniauth_callback_path")
+ .and_return("/users/auth/#{provider}/callback")
+ end
- def sign_in_using_ldap!(user, provider_label)
- visit new_user_session_path
- click_link provider_label
- fill_in 'username', with: user.username
- fill_in 'password', with: user.password
- click_button 'Sign in'
- end
+ def sign_in_using_ldap!(user, provider_label)
+ visit new_user_session_path
+ click_link provider_label
+ fill_in 'username', with: user.username
+ fill_in 'password', with: user.password
+ click_button 'Sign in'
+ end
- def enable_admin_mode_using_ldap!(user)
- visit new_admin_session_path
- click_link provider_label
- fill_in 'username', with: user.username
- fill_in 'password', with: user.password
- click_button 'Enter Admin Mode'
+ def enable_admin_mode_using_ldap!(user)
+ visit new_admin_session_path
+ click_link provider_label
+ fill_in 'username', with: user.username
+ fill_in 'password', with: user.password
+ click_button 'Enter Admin Mode'
+ end
end
end
end
diff --git a/spec/features/admin/users/users_spec.rb b/spec/features/admin/users/users_spec.rb
index e5df6cc0fd3..236327ea687 100644
--- a/spec/features/admin/users/users_spec.rb
+++ b/spec/features/admin/users/users_spec.rb
@@ -357,7 +357,7 @@ RSpec.describe 'Admin::Users' do
end
it 'creates new user' do
- expect { click_button 'Create user' }.to change {User.count}.by(1)
+ expect { click_button 'Create user' }.to change { User.count }.by(1)
end
it 'applies defaults to user' do
@@ -400,7 +400,7 @@ RSpec.describe 'Admin::Users' do
let_it_be(:user_username) { 'Bing bang' }
it "doesn't create the user and shows an error message" do
- expect { click_button 'Create user' }.to change {User.count}.by(0)
+ expect { click_button 'Create user' }.to change { User.count }.by(0)
expect(page).to have_content('The form contains the following error')
expect(page).to have_content('Username can contain only letters, digits')
diff --git a/spec/features/admin_variables_spec.rb b/spec/features/admin_variables_spec.rb
new file mode 100644
index 00000000000..174d4567520
--- /dev/null
+++ b/spec/features/admin_variables_spec.rb
@@ -0,0 +1,34 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Instance variables', :js do
+ let(:admin) { create(:admin) }
+ let(:page_path) { ci_cd_admin_application_settings_path }
+
+ let_it_be(:variable) { create(:ci_instance_variable, key: 'test_key', value: 'test_value', masked: true) }
+
+ before do
+ stub_env('IN_MEMORY_APPLICATION_SETTINGS', 'false')
+ sign_in(admin)
+ gitlab_enable_admin_mode_sign_in(admin)
+ wait_for_requests
+ end
+
+ context 'with disabled ff `ci_variable_settings_graphql`' do
+ before do
+ stub_feature_flags(ci_variable_settings_graphql: false)
+ visit page_path
+ end
+
+ it_behaves_like 'variable list', isAdmin: true
+ end
+
+ context 'with enabled ff `ci_variable_settings_graphql`' do
+ before do
+ visit page_path
+ end
+
+ it_behaves_like 'variable list', isAdmin: true
+ end
+end
diff --git a/spec/features/boards/board_filters_spec.rb b/spec/features/boards/board_filters_spec.rb
index 537b677cbd0..2e4dc4a29fc 100644
--- a/spec/features/boards/board_filters_spec.rb
+++ b/spec/features/boards/board_filters_spec.rb
@@ -16,7 +16,7 @@ RSpec.describe 'Issue board filters', :js do
let_it_be(:award_emoji1) { create(:award_emoji, name: 'thumbsup', user: user, awardable: issue_1) }
let(:filtered_search) { find('[data-testid="issue-board-filtered-search"]') }
- let(:filter_input) { find('.gl-filtered-search-term-input')}
+ let(:filter_input) { find('.gl-filtered-search-term-input') }
let(:filter_dropdown) { find('.gl-filtered-search-suggestion-list') }
let(:filter_first_suggestion) { find('.gl-filtered-search-suggestion-list').first('.gl-filtered-search-suggestion') }
let(:filter_submit) { find('.gl-search-box-by-click-search-button') }
@@ -164,7 +164,7 @@ RSpec.describe 'Issue board filters', :js do
end
describe 'filters by type' do
- let_it_be(:incident) { create(:incident, project: project)}
+ let_it_be(:incident) { create(:incident, project: project) }
before do
set_filter('type')
diff --git a/spec/features/boards/boards_spec.rb b/spec/features/boards/boards_spec.rb
index e8321adeb42..f279af90aa3 100644
--- a/spec/features/boards/boards_spec.rb
+++ b/spec/features/boards/boards_spec.rb
@@ -135,6 +135,7 @@ RSpec.describe 'Project issue boards', :js do
find('.board .board-list')
inspect_requests(inject_headers: { 'X-GITLAB-DISABLE-SQL-QUERY-LIMIT' => 'https://gitlab.com/gitlab-org/gitlab/-/issues/323426' }) do
+ evaluate_script("window.scrollTo(0, document.body.scrollHeight)")
evaluate_script("document.querySelectorAll('.board .board-list')[1].scrollTop = document.querySelectorAll('.board .board-list')[1].scrollHeight")
end
@@ -144,6 +145,7 @@ RSpec.describe 'Project issue boards', :js do
find('.board .board-list')
inspect_requests(inject_headers: { 'X-GITLAB-DISABLE-SQL-QUERY-LIMIT' => 'https://gitlab.com/gitlab-org/gitlab/-/issues/323426' }) do
+ evaluate_script("window.scrollTo(0, document.body.scrollHeight)")
evaluate_script("document.querySelectorAll('.board .board-list')[1].scrollTop = document.querySelectorAll('.board .board-list')[1].scrollHeight")
end
@@ -153,6 +155,7 @@ RSpec.describe 'Project issue boards', :js do
find('.board .board-list')
inspect_requests(inject_headers: { 'X-GITLAB-DISABLE-SQL-QUERY-LIMIT' => 'https://gitlab.com/gitlab-org/gitlab/-/issues/323426' }) do
+ evaluate_script("window.scrollTo(0, document.body.scrollHeight)")
evaluate_script("document.querySelectorAll('.board .board-list')[1].scrollTop = document.querySelectorAll('.board .board-list')[1].scrollHeight")
end
@@ -272,7 +275,7 @@ RSpec.describe 'Project issue boards', :js do
context 'issue card' do
it 'shows assignee' do
page.within(find('.board:nth-child(2)')) do
- expect(page).to have_selector('.avatar', count: 1)
+ expect(page).to have_selector('.gl-avatar', count: 1)
end
end
@@ -400,6 +403,7 @@ RSpec.describe 'Project issue boards', :js do
find('.board .board-list')
inspect_requests(inject_headers: { 'X-GITLAB-DISABLE-SQL-QUERY-LIMIT' => 'https://gitlab.com/gitlab-org/gitlab/-/issues/323426' }) do
+ evaluate_script("window.scrollTo(0, document.body.scrollHeight)")
evaluate_script("document.querySelectorAll('.board .board-list')[1].scrollTop = document.querySelectorAll('.board .board-list')[1].scrollHeight")
end
@@ -409,6 +413,7 @@ RSpec.describe 'Project issue boards', :js do
find('.board .board-list')
inspect_requests(inject_headers: { 'X-GITLAB-DISABLE-SQL-QUERY-LIMIT' => 'https://gitlab.com/gitlab-org/gitlab/-/issues/323426' }) do
+ evaluate_script("window.scrollTo(0, document.body.scrollHeight)")
evaluate_script("document.querySelectorAll('.board .board-list')[1].scrollTop = document.querySelectorAll('.board .board-list')[1].scrollHeight")
end
@@ -417,6 +422,7 @@ RSpec.describe 'Project issue boards', :js do
find('.board .board-list')
inspect_requests(inject_headers: { 'X-GITLAB-DISABLE-SQL-QUERY-LIMIT' => 'https://gitlab.com/gitlab-org/gitlab/-/issues/323426' }) do
+ evaluate_script("window.scrollTo(0, document.body.scrollHeight)")
evaluate_script("document.querySelectorAll('.board .board-list')[1].scrollTop = document.querySelectorAll('.board .board-list')[1].scrollHeight")
end
diff --git a/spec/features/boards/reload_boards_on_browser_back_spec.rb b/spec/features/boards/reload_boards_on_browser_back_spec.rb
index 6a09e3c9506..7fa440befc1 100644
--- a/spec/features/boards/reload_boards_on_browser_back_spec.rb
+++ b/spec/features/boards/reload_boards_on_browser_back_spec.rb
@@ -3,9 +3,9 @@
require 'spec_helper'
RSpec.describe 'Ensure Boards do not show stale data on browser back', :js do
- let(:project) {create(:project, :public)}
- let(:board) {create(:board, project: project)}
- let(:user) {create(:user)}
+ let(:project) { create(:project, :public) }
+ let(:board) { create(:board, project: project) }
+ let(:user) { create(:user) }
context 'authorized user' do
before do
diff --git a/spec/features/clusters/create_agent_spec.rb b/spec/features/clusters/create_agent_spec.rb
index b879ae645f7..c44741b756b 100644
--- a/spec/features/clusters/create_agent_spec.rb
+++ b/spec/features/clusters/create_agent_spec.rb
@@ -5,6 +5,7 @@ require 'spec_helper'
RSpec.describe 'Cluster agent registration', :js do
let_it_be(:project) { create(:project, :custom_repo, files: { '.gitlab/agents/example-agent-1/config.yaml' => '' }) }
let_it_be(:current_user) { create(:user, maintainer_projects: [project]) }
+ let_it_be(:token) { Devise.friendly_token }
before do
allow(Gitlab::Kas).to receive(:enabled?).and_return(true)
@@ -18,7 +19,7 @@ RSpec.describe 'Cluster agent registration', :js do
allow(client).to receive(:get_connected_agents).and_return([])
end
- allow(Devise).to receive(:friendly_token).and_return('example-agent-token')
+ allow(Devise).to receive(:friendly_token).and_return(token)
sign_in(current_user)
visit project_clusters_path(project)
@@ -33,7 +34,7 @@ RSpec.describe 'Cluster agent registration', :js do
click_button('Register')
expect(page).to have_content('You cannot see this token again after you close this window.')
- expect(page).to have_content('example-agent-token')
+ expect(page).to have_content(token)
expect(page).to have_content('helm upgrade --install')
expect(page).to have_content('example-agent-2')
diff --git a/spec/features/cycle_analytics_spec.rb b/spec/features/cycle_analytics_spec.rb
index 03d61020ff0..7714783172f 100644
--- a/spec/features/cycle_analytics_spec.rb
+++ b/spec/features/cycle_analytics_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
RSpec.describe 'Value Stream Analytics', :js do
+ include CycleAnalyticsHelpers
+
let_it_be(:user) { create(:user) }
let_it_be(:guest) { create(:user) }
let_it_be(:stage_table_selector) { '[data-testid="vsa-stage-table"]' }
@@ -213,14 +215,20 @@ RSpec.describe 'Value Stream Analytics', :js do
expect(page.find(metrics_selector)).not_to have_selector("#commits")
end
- it 'needs permissions to see restricted stages' do
+ it 'does not show restricted stages', :aggregate_failures do
expect(find(stage_table_selector)).to have_content(issue.title)
- click_stage('Code')
- expect(find(stage_table_selector)).to have_content('You need permission.')
+ expect(page).to have_selector('.gl-path-nav-list-item', text: 'Issue')
+
+ expect(page).to have_selector('.gl-path-nav-list-item', text: 'Plan')
+
+ expect(page).to have_selector('.gl-path-nav-list-item', text: 'Test')
+
+ expect(page).to have_selector('.gl-path-nav-list-item', text: 'Staging')
+
+ expect(page).not_to have_selector('.gl-path-nav-list-item', text: 'Code')
- click_stage('Review')
- expect(find(stage_table_selector)).to have_content('You need permission.')
+ expect(page).not_to have_selector('.gl-path-nav-list-item', text: 'Review')
end
end
diff --git a/spec/features/dashboard/archived_projects_spec.rb b/spec/features/dashboard/archived_projects_spec.rb
index 1b349fa2276..d157d44bab7 100644
--- a/spec/features/dashboard/archived_projects_spec.rb
+++ b/spec/features/dashboard/archived_projects_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
RSpec.describe 'Dashboard Archived Project' do
let(:user) { create :user }
- let(:project) { create :project}
+ let(:project) { create :project }
let(:archived_project) { create(:project, :archived) }
before do
diff --git a/spec/features/dashboard/issuables_counter_spec.rb b/spec/features/dashboard/issuables_counter_spec.rb
index f8b68be7f93..91901414dde 100644
--- a/spec/features/dashboard/issuables_counter_spec.rb
+++ b/spec/features/dashboard/issuables_counter_spec.rb
@@ -8,73 +8,41 @@ RSpec.describe 'Navigation bar counter', :use_clean_rails_memory_store_caching d
let(:issue) { create(:issue, project: project) }
let(:merge_request) { create(:merge_request, source_project: project) }
- describe 'feature flag mr_attention_requests is disabled' do
- before do
- stub_feature_flags(mr_attention_requests: false)
-
- issue.assignees = [user]
- merge_request.update!(assignees: [user])
- sign_in(user)
- end
-
- it 'reflects dashboard issues count' do
- visit issues_path
-
- expect_counters('issues', '1', n_("%d assigned issue", "%d assigned issues", 1) % 1)
-
- issue.assignees = []
-
- user.invalidate_cache_counts
+ before do
+ issue.assignees = [user]
+ merge_request.update!(assignees: [user])
+ sign_in(user)
+ end
- travel_to(3.minutes.from_now) do
- visit issues_path
+ it 'reflects dashboard issues count' do
+ visit issues_path
- expect_counters('issues', '0', n_("%d assigned issue", "%d assigned issues", 0) % 0)
- end
- end
-
- it 'reflects dashboard merge requests count', :js do
- visit merge_requests_path
+ expect_counters('issues', '1', n_("%d assigned issue", "%d assigned issues", 1) % 1)
- expect_counters('merge_requests', '1', n_("%d merge request", "%d merge requests", 1) % 1)
+ issue.assignees = []
- merge_request.update!(assignees: [])
+ user.invalidate_cache_counts
- user.invalidate_cache_counts
-
- travel_to(3.minutes.from_now) do
- visit merge_requests_path
+ travel_to(3.minutes.from_now) do
+ visit issues_path
- expect_counters('merge_requests', '0', n_("%d merge request", "%d merge requests", 0) % 0)
- end
+ expect_counters('issues', '0', n_("%d assigned issue", "%d assigned issues", 0) % 0)
end
end
- describe 'feature flag mr_attention_requests is enabled' do
- before do
- merge_request.update!(assignees: [user])
-
- merge_request.find_assignee(user).update!(state: :attention_requested)
-
- user.invalidate_attention_requested_count
-
- sign_in(user)
- end
-
- it 'reflects dashboard merge requests count', :js do
- visit merge_requests_attention_path
+ it 'reflects dashboard merge requests count', :js do
+ visit merge_requests_path
- expect_counters('merge_requests', '1', n_("%d merge request", "%d merge requests", 1) % 1)
+ expect_counters('merge_requests', '1', n_("%d merge request", "%d merge requests", 1) % 1)
- merge_request.find_assignee(user).update!(state: :reviewed)
+ merge_request.update!(assignees: [])
- user.invalidate_attention_requested_count
+ user.invalidate_cache_counts
- travel_to(3.minutes.from_now) do
- visit merge_requests_attention_path
+ travel_to(3.minutes.from_now) do
+ visit merge_requests_path
- expect_counters('merge_requests', '0', n_("%d merge request", "%d merge requests", 0) % 0)
- end
+ expect_counters('merge_requests', '0', n_("%d merge request", "%d merge requests", 0) % 0)
end
end
@@ -86,10 +54,6 @@ RSpec.describe 'Navigation bar counter', :use_clean_rails_memory_store_caching d
merge_requests_dashboard_path(assignee_username: user.username)
end
- def merge_requests_attention_path
- merge_requests_dashboard_path(attention: user.username)
- end
-
def expect_counters(issuable_type, count, badge_label)
dashboard_count = find('.gl-tabs-nav li a.active')
diff --git a/spec/features/dashboard/merge_requests_spec.rb b/spec/features/dashboard/merge_requests_spec.rb
index fd580b679ad..70f614cdcef 100644
--- a/spec/features/dashboard/merge_requests_spec.rb
+++ b/spec/features/dashboard/merge_requests_spec.rb
@@ -112,8 +112,8 @@ RSpec.describe 'Dashboard Merge Requests' do
end
it 'includes assigned and reviewers in badge' do
- within("span[aria-label='#{n_("%d merge request", "%d merge requests", 0) % 0}']") do
- expect(page).to have_content('0')
+ within("span[aria-label='#{n_("%d merge request", "%d merge requests", 3) % 3}']") do
+ expect(page).to have_content('3')
end
find('.dashboard-shortcuts-merge_requests').click
diff --git a/spec/features/error_tracking/user_filters_errors_by_status_spec.rb b/spec/features/error_tracking/user_filters_errors_by_status_spec.rb
index d5dbe259159..2ac43f67f64 100644
--- a/spec/features/error_tracking/user_filters_errors_by_status_spec.rb
+++ b/spec/features/error_tracking/user_filters_errors_by_status_spec.rb
@@ -10,8 +10,8 @@ RSpec.describe 'When a user filters Sentry errors by status', :js, :use_clean_ra
let(:issues_api_url) { "#{sentry_api_urls.issues_url}?limit=20&query=is:unresolved" }
let(:issues_api_url_filter) { "#{sentry_api_urls.issues_url}?limit=20&query=is:ignored" }
- let(:auth_token) {{ 'Authorization' => 'Bearer access_token_123' }}
- let(:return_header) {{ 'Content-Type' => 'application/json' }}
+ let(:auth_token) { { 'Authorization' => 'Bearer access_token_123' } }
+ let(:return_header) { { 'Content-Type' => 'application/json' } }
before do
stub_request(:get, issues_api_url).with(headers: auth_token)
diff --git a/spec/features/group_variables_spec.rb b/spec/features/group_variables_spec.rb
index 9af9baeb5bb..ab24162ad5a 100644
--- a/spec/features/group_variables_spec.rb
+++ b/spec/features/group_variables_spec.rb
@@ -23,7 +23,11 @@ RSpec.describe 'Group variables', :js do
it_behaves_like 'variable list'
end
- # TODO: Uncomment when the new graphQL app for variable settings
- # is enabled.
- # it_behaves_like 'variable list'
+ context 'with enabled ff `ci_variable_settings_graphql`' do
+ before do
+ visit page_path
+ end
+
+ it_behaves_like 'variable list'
+ end
end
diff --git a/spec/features/groups/crm/contacts/create_spec.rb b/spec/features/groups/crm/contacts/create_spec.rb
index d6c6e3f1745..b10b2afe35c 100644
--- a/spec/features/groups/crm/contacts/create_spec.rb
+++ b/spec/features/groups/crm/contacts/create_spec.rb
@@ -22,7 +22,9 @@ RSpec.describe 'Create a CRM contact', :js do
fill_in 'description', with: 'VIP'
click_button 'Save changes'
- expect(page).to have_content 'gitlab@example.com'
+ wait_for_requests
+
+ expect(group.contacts.first.email).to eq('gitlab@example.com')
expect(page).to have_current_path("#{group_crm_contacts_path(group)}/", ignore_query: true)
end
end
diff --git a/spec/features/groups/group_runners_spec.rb b/spec/features/groups/group_runners_spec.rb
index a129db6cb6f..b98c94b030d 100644
--- a/spec/features/groups/group_runners_spec.rb
+++ b/spec/features/groups/group_runners_spec.rb
@@ -149,77 +149,39 @@ RSpec.describe "Group Runners" do
create(:ci_runner, :group, groups: [group], description: 'runner-foo', contacted_at: Time.zone.now)
end
- context 'when group_runner_view_ui is disabled' do
- before do
- stub_feature_flags(group_runner_view_ui: false)
- end
-
- it 'user edits the runner to be protected' do
- visit edit_group_runner_path(group, runner)
+ it 'user views runner details' do
+ visit group_runner_path(group, runner)
- expect(page.find_field('runner[access_level]')).not_to be_checked
-
- check 'runner_access_level'
- click_button 'Save changes'
-
- expect(page).to have_content 'Protected Yes'
- end
-
- context 'when a runner has a tag' do
- before do
- runner.update!(tag_list: ['tag'])
- end
+ expect(page).to have_content "#{s_('Runners|Description')} runner-foo"
+ end
- it 'user edits runner not to run untagged jobs' do
- visit edit_group_runner_path(group, runner)
+ it 'user edits the runner to be protected' do
+ visit edit_group_runner_path(group, runner)
- expect(page.find_field('runner[run_untagged]')).to be_checked
+ expect(page.find_field('runner[access_level]')).not_to be_checked
- uncheck 'runner_run_untagged'
- click_button 'Save changes'
+ check 'runner_access_level'
+ click_button _('Save changes')
- expect(page).to have_content 'Can run untagged jobs No'
- end
- end
+ expect(page).to have_content "#{s_('Runners|Configuration')} #{s_('Runners|Protected')}"
end
- context 'when group_runner_view_ui is enabled' do
+ context 'when a runner has a tag' do
before do
- stub_feature_flags(group_runner_view_ui: true)
+ runner.update!(tag_list: ['tag1'])
end
- it 'user views runner details' do
- visit group_runner_path(group, runner)
-
- expect(page).to have_content "#{s_('Runners|Description')} runner-foo"
- end
-
- it 'user edits the runner to be protected' do
+ it 'user edits runner not to run untagged jobs' do
visit edit_group_runner_path(group, runner)
- expect(page.find_field('runner[access_level]')).not_to be_checked
+ page.find_field('runner[tag_list]').set('tag1, tag2')
- check 'runner_access_level'
+ uncheck 'runner_run_untagged'
click_button _('Save changes')
- expect(page).to have_content "#{s_('Runners|Configuration')} #{s_('Runners|Protected')}"
- end
-
- context 'when a runner has a tag' do
- before do
- runner.update!(tag_list: ['tag'])
- end
-
- it 'user edits runner not to run untagged jobs' do
- visit edit_group_runner_path(group, runner)
-
- page.find_field('runner[tag_list]').set('tag, tag2')
-
- uncheck 'runner_run_untagged'
- click_button _('Save changes')
-
- expect(page).to have_content "#{s_('Runners|Tags')} tag tag2"
- end
+ # Tags can be in any order
+ expect(page).to have_content(/#{s_('Runners|Tags')}.*tag1/)
+ expect(page).to have_content(/#{s_('Runners|Tags')}.*tag2/)
end
end
end
diff --git a/spec/features/groups/issues_spec.rb b/spec/features/groups/issues_spec.rb
index c86705832b1..eec07c84cde 100644
--- a/spec/features/groups/issues_spec.rb
+++ b/spec/features/groups/issues_spec.rb
@@ -7,12 +7,12 @@ RSpec.describe 'Group issues page' do
include DragTo
let(:group) { create(:group) }
- let(:project) { create(:project, :public, group: group)}
+ let(:project) { create(:project, :public, group: group) }
let(:project_with_issues_disabled) { create(:project, :issues_disabled, group: group) }
let(:path) { issues_group_path(group) }
context 'with shared examples', :js do
- let(:issuable) { create(:issue, project: project, title: "this is my created issuable")}
+ let(:issuable) { create(:issue, project: project, title: "this is my created issuable") }
include_examples 'project features apply to issuables', Issue
@@ -68,7 +68,7 @@ RSpec.describe 'Group issues page' do
context 'issues list', :js do
let(:subgroup) { create(:group, parent: group) }
- let(:subgroup_project) { create(:project, :public, group: subgroup)}
+ let(:subgroup_project) { create(:project, :public, group: subgroup) }
let(:user_in_group) { create(:group_member, :maintainer, user: create(:user), group: group ).user }
let!(:issue) { create(:issue, project: project, title: 'root group issue') }
let!(:subgroup_issue) { create(:issue, project: subgroup_project, title: 'subgroup issue') }
diff --git a/spec/features/groups/members/manage_members_spec.rb b/spec/features/groups/members/manage_members_spec.rb
index 468001c3be6..5f28afc23f1 100644
--- a/spec/features/groups/members/manage_members_spec.rb
+++ b/spec/features/groups/members/manage_members_spec.rb
@@ -74,8 +74,8 @@ RSpec.describe 'Groups > Members > Manage members' do
invite_member(user1.name, role: 'Reporter', refresh: false)
- expect(page).to have_selector(invite_modal_selector)
- expect(page).to have_content("not authorized to update member")
+ invite_modal = page.find(invite_modal_selector)
+ expect(invite_modal).to have_content("not authorized to update member")
page.refresh
diff --git a/spec/features/groups/show_spec.rb b/spec/features/groups/show_spec.rb
index 9a1e216c6d2..d814906a274 100644
--- a/spec/features/groups/show_spec.rb
+++ b/spec/features/groups/show_spec.rb
@@ -84,7 +84,7 @@ RSpec.describe 'Group show page' do
it 'shows `Create new subgroup` link' do
expect(page).to have_link(
s_('GroupsEmptyState|Create new subgroup'),
- href: new_group_path(parent_id: group.id)
+ href: new_group_path(parent_id: group.id, anchor: 'create-group-pane')
)
end
@@ -97,28 +97,43 @@ RSpec.describe 'Group show page' do
end
end
- context 'when a public project is shared with a private group' do
- let_it_be(:private_group) { create(:group, :private) }
+ context 'visibility warning popover' do
let_it_be(:public_project) { create(:project, :public) }
- let_it_be(:project_group_link) { create(:project_group_link, group: private_group, project: public_project) }
- before do
- private_group.add_owner(user)
- sign_in(user)
- end
+ shared_examples 'it shows warning popover' do
+ it 'shows warning popover', :js do
+ group_to_share_with.add_owner(user)
+ sign_in(user)
+ visit group_path(group_to_share_with)
+
+ click_link _('Shared projects')
+
+ wait_for_requests
- it 'shows warning popover', :js do
- visit group_path(private_group)
+ page.within("[data-testid=\"group-overview-item-#{public_project.id}\"]") do
+ click_button _('Less restrictive visibility')
+ end
+
+ expect(page).to have_content _('Project visibility level is less restrictive than the group settings.')
+ end
+ end
- click_link _('Shared projects')
+ context 'when a public project is shared with a private group' do
+ let_it_be(:group_to_share_with) { create(:group, :private) }
+ let_it_be(:project_group_link) do
+ create(:project_group_link, group: group_to_share_with, project: public_project)
+ end
- wait_for_requests
+ include_examples 'it shows warning popover'
+ end
- page.within("[data-testid=\"group-overview-item-#{public_project.id}\"]") do
- click_button _('Less restrictive visibility')
+ context 'when a public project is shared with an internal group' do
+ let_it_be(:group_to_share_with) { create(:group, :internal) }
+ let_it_be(:project_group_link) do
+ create(:project_group_link, group: group_to_share_with, project: public_project)
end
- expect(page).to have_content _('Project visibility level is less restrictive than the group settings.')
+ include_examples 'it shows warning popover'
end
end
diff --git a/spec/features/groups_spec.rb b/spec/features/groups_spec.rb
index ece6167b193..c93ed01b873 100644
--- a/spec/features/groups_spec.rb
+++ b/spec/features/groups_spec.rb
@@ -221,14 +221,13 @@ RSpec.describe 'Group' do
let(:user) { create(:admin) }
before do
- visit new_group_path(parent_id: group.id)
+ visit new_group_path(parent_id: group.id, anchor: 'create-group-pane')
end
context 'when admin mode is enabled', :enable_admin_mode do
it 'creates a nested group' do
- click_link 'Create group'
- fill_in 'Group name', with: 'bar'
- click_button 'Create group'
+ fill_in 'Subgroup name', with: 'bar'
+ click_button 'Create subgroup'
expect(page).to have_current_path(group_path('foo/bar'), ignore_query: true)
expect(page).to have_selector 'h1', text: 'bar'
@@ -237,7 +236,7 @@ RSpec.describe 'Group' do
context 'when admin mode is disabled' do
it 'is not allowed' do
- expect(page).not_to have_button('Create group')
+ expect(page).not_to have_button('Create subgroup')
end
end
end
@@ -250,11 +249,10 @@ RSpec.describe 'Group' do
sign_out(:user)
sign_in(user)
- visit new_group_path(parent_id: group.id)
- click_link 'Create group'
+ visit new_group_path(parent_id: group.id, anchor: 'create-group-pane')
- fill_in 'Group name', with: 'bar'
- click_button 'Create group'
+ fill_in 'Subgroup name', with: 'bar'
+ click_button 'Create subgroup'
expect(page).to have_current_path(group_path('foo/bar'), ignore_query: true)
expect(page).to have_selector 'h1', text: 'bar'
@@ -268,7 +266,7 @@ RSpec.describe 'Group' do
end
context 'when creating subgroup' do
- let(:path) { new_group_path(parent_id: group.id) }
+ let(:path) { new_group_path(parent_id: group.id, anchor: 'create-group-pane') }
it 'does not render recaptcha' do
visit path
@@ -278,24 +276,50 @@ RSpec.describe 'Group' do
end
end
+ context 'when many parent groups are available' do
+ let_it_be(:group2) { create(:group, path: 'foo2') }
+ let_it_be(:group3) { create(:group, path: 'foo3') }
+
+ before do
+ group.add_owner(user)
+ group2.add_maintainer(user)
+ group3.add_developer(user)
+ visit new_group_path(parent_id: group.id, anchor: 'create-group-pane')
+ end
+
+ it 'creates private subgroup' do
+ fill_in 'Subgroup name', with: 'bar'
+ click_button 'foo'
+
+ expect(page).to have_css('[data-testid="select_group_dropdown_item"]', text: 'foo2')
+ expect(page).not_to have_css('[data-testid="select_group_dropdown_item"]', text: 'foo3')
+
+ click_button 'foo2'
+ click_button 'Create subgroup'
+
+ expect(page).to have_current_path(group_path('foo2/bar'), ignore_query: true)
+ expect(page).to have_selector('h1', text: 'bar')
+ expect(page).to have_selector('.visibility-icon [data-testid="lock-icon"]')
+ end
+ end
+
describe 'real-time group url validation', :js do
let_it_be(:subgroup) { create(:group, path: 'sub', parent: group) }
before do
group.add_owner(user)
- visit new_group_path(parent_id: group.id)
- click_link 'Create group'
+ visit new_group_path(parent_id: group.id, anchor: 'create-group-pane')
end
it 'shows a message if group url is available' do
- fill_in 'Group URL', with: group.path
+ fill_in 'Subgroup slug', with: group.path
wait_for_requests
expect(page).to have_content('Group path is available')
end
it 'shows an error if group url is taken' do
- fill_in 'Group URL', with: subgroup.path
+ fill_in 'Subgroup slug', with: subgroup.path
wait_for_requests
expect(page).to have_content("Group path is unavailable. Path has been replaced with a suggested available path.")
@@ -308,7 +332,7 @@ RSpec.describe 'Group' do
sign_out(:user)
sign_in(create(:user))
- visit new_group_path(parent_id: group.id)
+ visit new_group_path(parent_id: group.id, anchor: 'create-group-pane')
expect(page).to have_title('Not Found')
expect(page).to have_content('Page Not Found')
@@ -354,7 +378,7 @@ RSpec.describe 'Group' do
end
it 'removes group', :sidekiq_might_not_need_inline do
- expect { remove_with_confirm('Remove group', group.path) }.to change {Group.count}.by(-1)
+ expect { remove_with_confirm('Remove group', group.path) }.to change { Group.count }.by(-1)
expect(group.members.all.count).to be_zero
expect(page).to have_content "scheduled for deletion"
end
@@ -507,8 +531,8 @@ RSpec.describe 'Group' do
let_it_be(:storage_enforcement_date) { Date.today + 30 }
before do
- allow_next_found_instance_of(Group) do |grp|
- allow(grp).to receive(:storage_enforcement_date).and_return(storage_enforcement_date)
+ allow_next_found_instance_of(Group) do |group|
+ allow(group).to receive(:storage_enforcement_date).and_return(storage_enforcement_date)
end
end
@@ -518,8 +542,8 @@ RSpec.describe 'Group' do
end
it 'does not display the banner in a paid group page' do
- allow_next_found_instance_of(Group) do |grp|
- allow(grp).to receive(:paid?).and_return(true)
+ allow_next_found_instance_of(Group) do |group|
+ allow(group).to receive(:paid?).and_return(true)
end
visit group_path(group)
expect_page_not_to_have_storage_enforcement_banner
@@ -534,8 +558,8 @@ RSpec.describe 'Group' do
expect_page_not_to_have_storage_enforcement_banner
storage_enforcement_date = Date.today + 13
- allow_next_found_instance_of(Group) do |grp|
- allow(grp).to receive(:storage_enforcement_date).and_return(storage_enforcement_date)
+ allow_next_found_instance_of(Group) do |group|
+ allow(group).to receive(:storage_enforcement_date).and_return(storage_enforcement_date)
end
page.refresh
expect_page_to_have_storage_enforcement_banner(storage_enforcement_date)
@@ -543,8 +567,12 @@ RSpec.describe 'Group' do
end
context 'with storage_enforcement_date not set' do
- # This test should break and be rewritten after the implementation of the storage_enforcement_date
- # TBD: https://gitlab.com/gitlab-org/gitlab/-/issues/350632
+ before do
+ allow_next_found_instance_of(Group) do |group|
+ allow(group).to receive(:storage_enforcement_date).and_return(nil)
+ end
+ end
+
it 'does not display the banner in the group page' do
stub_feature_flags(namespace_storage_limit_bypass_date_check: false)
visit group_path(group)
@@ -554,10 +582,10 @@ RSpec.describe 'Group' do
end
def expect_page_to_have_storage_enforcement_banner(storage_enforcement_date)
- expect(page).to have_text "From #{storage_enforcement_date} storage limits will apply to this namespace"
+ expect(page).to have_text "Effective #{storage_enforcement_date}, namespace storage limits will apply"
end
def expect_page_not_to_have_storage_enforcement_banner
- expect(page).not_to have_text "storage limits will apply to this namespace"
+ expect(page).not_to have_text "namespace storage limits will apply"
end
end
diff --git a/spec/features/invites_spec.rb b/spec/features/invites_spec.rb
index fe804dc52d7..1baa97096d9 100644
--- a/spec/features/invites_spec.rb
+++ b/spec/features/invites_spec.rb
@@ -182,12 +182,14 @@ RSpec.describe 'Group or Project invitations', :aggregate_failures do
context 'email confirmation disabled' do
let(:send_email_confirmation) { false }
- it 'signs up and redirects to the most recent membership activity page with all the projects/groups invitations automatically accepted' do
- fill_in_sign_up_form(new_user)
- fill_in_welcome_form
+ context 'the user signs up for an account with the invitation email address' do
+ it 'redirects to the most recent membership activity page with all the projects/groups invitations automatically accepted' do
+ fill_in_sign_up_form(new_user)
+ fill_in_welcome_form
- expect(page).to have_current_path(activity_group_path(group), ignore_query: true)
- expect(page).to have_content('You have been granted Owner access to group Owned.')
+ expect(page).to have_current_path(activity_group_path(group), ignore_query: true)
+ expect(page).to have_content('You have been granted Owner access to group Owned.')
+ end
end
context 'the user sign-up using a different email address' do
@@ -227,11 +229,13 @@ RSpec.describe 'Group or Project invitations', :aggregate_failures do
end
end
- it 'signs up and redirects to the group activity page with all the project/groups invitation automatically accepted' do
- fill_in_sign_up_form(new_user)
- fill_in_welcome_form
+ context 'the user signs up for an account with the invitation email address' do
+ it 'redirects to the most recent membership activity page with all the projects/groups invitations automatically accepted' do
+ fill_in_sign_up_form(new_user)
+ fill_in_welcome_form
- expect(page).to have_current_path(activity_group_path(group), ignore_query: true)
+ expect(page).to have_current_path(activity_group_path(group), ignore_query: true)
+ end
end
context 'the user sign-up using a different email address' do
diff --git a/spec/features/issuables/user_sees_sidebar_spec.rb b/spec/features/issuables/user_sees_sidebar_spec.rb
index 04bf704b6a4..66ed6044de6 100644
--- a/spec/features/issuables/user_sees_sidebar_spec.rb
+++ b/spec/features/issuables/user_sees_sidebar_spec.rb
@@ -8,7 +8,7 @@ RSpec.describe 'Issue Sidebar on Mobile' do
let(:project) { create(:project, :public, :repository) }
let(:merge_request) { create(:merge_request, source_project: project) }
let(:issue) { create(:issue, project: project) }
- let!(:user) { create(:user)}
+ let!(:user) { create(:user) }
before do
sign_in(user)
diff --git a/spec/features/issues/form_spec.rb b/spec/features/issues/form_spec.rb
index 0700423983f..e749c555dcf 100644
--- a/spec/features/issues/form_spec.rb
+++ b/spec/features/issues/form_spec.rb
@@ -319,6 +319,13 @@ RSpec.describe 'New/edit issue', :js do
end
end
end
+
+ describe 'when repository contains CONTRIBUTING.md' do
+ it 'has contribution guidelines prompt' do
+ text = _('Please review the %{linkStart}contribution guidelines%{linkEnd} for this project.') % { linkStart: nil, linkEnd: nil }
+ expect(find('#new_issue')).to have_text(text)
+ end
+ end
end
describe 'new issue with query parameters' do
diff --git a/spec/features/issues/gfm_autocomplete_spec.rb b/spec/features/issues/gfm_autocomplete_spec.rb
index 8732e2ecff2..fa4ce6fe1c1 100644
--- a/spec/features/issues/gfm_autocomplete_spec.rb
+++ b/spec/features/issues/gfm_autocomplete_spec.rb
@@ -414,7 +414,7 @@ RSpec.describe 'GFM autocomplete', :js do
it 'shows all contacts' do
page.within(find_autocomplete_menu) do
- expected_data = contacts.map { |c| "#{c.first_name} #{c.last_name} #{c.email}"}
+ expected_data = contacts.map { |c| "#{c.first_name} #{c.last_name} #{c.email}" }
expect(page.all('li').map(&:text)).to match_array(expected_data)
end
diff --git a/spec/features/issues/incident_issue_spec.rb b/spec/features/issues/incident_issue_spec.rb
index d6ec7f1c539..56be1493ed2 100644
--- a/spec/features/issues/incident_issue_spec.rb
+++ b/spec/features/issues/incident_issue_spec.rb
@@ -30,7 +30,7 @@ RSpec.describe 'Incident Detail', :js do
project.add_developer(user)
sign_in(user)
- visit project_issue_path(project, incident)
+ visit project_issues_incident_path(project, incident)
wait_for_requests
end
@@ -49,72 +49,32 @@ RSpec.describe 'Incident Detail', :js do
expect(incident_tabs).to have_content('Original alert: #1')
end
- aggregate_failures 'shows the Alert details tab' do
- click_link 'Alert details'
-
- expect(incident_tabs).to have_content('"title": "Alert title"')
- expect(incident_tabs).to have_content('"yet.another": 73')
- end
- end
- end
-
- context 'when on summary tab' do
- before do
- click_link 'Summary'
- end
-
- it 'shows the summary tab with all components' do
- page.within('.issuable-details') do
+ aggregate_failures 'when on summary tab (default tab)' do
hidden_items = find_all('.js-issue-widgets')
# Linked Issues/MRs and comment box
expect(hidden_items.count).to eq(2)
-
expect(hidden_items).to all(be_visible)
- end
- end
-
- it 'shows the edit title and description button' do
- edit_button = find_all('[aria-label="Edit title and description"]')
-
- expect(edit_button).to all(be_visible)
- end
- end
-
- context 'when on alert details tab' do
- before do
- click_link 'Alert details'
- end
-
- it 'does not show the linked issues and notes/comment components' do
- page.within('.issuable-details') do
- hidden_items = find_all('.js-issue-widgets')
- # Linked Issues/MRs and comment box are hidden on page
- expect(hidden_items.count).to eq(0)
+ edit_button = find_all('[aria-label="Edit title and description"]')
+ expect(edit_button).to all(be_visible)
end
- end
- it 'does not show the edit title and description button' do
- edit_button = find_all('[aria-label="Edit title and description"]')
-
- expect(edit_button.count).to eq(0)
- end
- end
+ aggregate_failures 'shows the Alert details tab' do
+ click_link 'Alert details'
- context 'when on timeline events tab from incident route' do
- before do
- visit project_issues_incident_path(project, incident)
- wait_for_requests
- click_link 'Timeline'
- end
+ expect(incident_tabs).to have_content('"title": "Alert title"')
+ expect(incident_tabs).to have_content('"yet.another": 73')
- it 'does not show the linked issues and notes/comment components' do
- page.within('.issuable-details') do
+ # does not show the linked issues and notes/comment components
hidden_items = find_all('.js-issue-widgets')
# Linked Issues/MRs and comment box are hidden on page
expect(hidden_items.count).to eq(0)
+
+ # does not show the edit title and description button
+ edit_button = find_all('[aria-label="Edit title and description"]')
+ expect(edit_button.count).to eq(0)
end
end
end
@@ -126,7 +86,7 @@ RSpec.describe 'Incident Detail', :js do
click_link 'Timeline'
end
- it 'does not show the linked issues and notes/comment commponents' do
+ it 'does not show the linked issues and notes/comment components' do
page.within('.issuable-details') do
hidden_items = find_all('.js-issue-widgets')
@@ -140,7 +100,7 @@ RSpec.describe 'Incident Detail', :js do
before do
stub_feature_flags(incident_timeline: false)
- visit project_issue_path(project, incident)
+ visit project_issues_incident_path(project, incident)
wait_for_requests
end
diff --git a/spec/features/issues/issue_sidebar_spec.rb b/spec/features/issues/issue_sidebar_spec.rb
index aaa478378a9..8819f085a5f 100644
--- a/spec/features/issues/issue_sidebar_spec.rb
+++ b/spec/features/issues/issue_sidebar_spec.rb
@@ -84,8 +84,10 @@ RSpec.describe 'Issue Sidebar' do
click_link user2.name
end
- find('.js-right-sidebar').click
- find('.block.assignee .edit-link').click
+ within '.js-right-sidebar' do
+ find('.block.assignee').click(x: 0, y: 0)
+ find('.block.assignee .edit-link').click
+ end
expect(page.all('.dropdown-menu-user li').length).to eq(1)
expect(find('.dropdown-input-field').value).to eq(user2.name)
@@ -182,7 +184,7 @@ RSpec.describe 'Issue Sidebar' do
page.within '.dropdown-menu-user' do
expect(page).not_to have_content 'Unassigned'
- click_link user2.name
+ click_button user2.name
end
find('.participants').click
diff --git a/spec/features/issues/related_issues_spec.rb b/spec/features/issues/related_issues_spec.rb
index a95229d4f1b..818e99f2ec9 100644
--- a/spec/features/issues/related_issues_spec.rb
+++ b/spec/features/issues/related_issues_spec.rb
@@ -232,7 +232,9 @@ RSpec.describe 'Related issues', :js do
it 'add related issue' do
click_button 'Add a related issue'
fill_in 'Paste issue link', with: "#{issue_b.to_reference(project)} "
- click_button 'Add'
+ page.within('.linked-issues-card-body') do
+ click_button 'Add'
+ end
wait_for_requests
@@ -249,7 +251,9 @@ RSpec.describe 'Related issues', :js do
it 'add cross-project related issue' do
click_button 'Add a related issue'
fill_in 'Paste issue link', with: "#{issue_project_b_a.to_reference(project)} "
- click_button 'Add'
+ page.within('.linked-issues-card-body') do
+ click_button 'Add'
+ end
wait_for_requests
@@ -359,7 +363,9 @@ RSpec.describe 'Related issues', :js do
it 'add related issue' do
click_button 'Add a related issue'
fill_in 'Paste issue link', with: "##{issue_d.iid} "
- click_button 'Add'
+ page.within('.linked-issues-card-body') do
+ click_button 'Add'
+ end
wait_for_requests
@@ -375,7 +381,9 @@ RSpec.describe 'Related issues', :js do
it 'add invalid related issue' do
click_button 'Add a related issue'
fill_in 'Paste issue link', with: '#9999999 '
- click_button 'Add'
+ page.within('.linked-issues-card-body') do
+ click_button 'Add'
+ end
wait_for_requests
@@ -390,7 +398,9 @@ RSpec.describe 'Related issues', :js do
it 'add unauthorized related issue' do
click_button 'Add a related issue'
fill_in 'Paste issue link', with: "#{issue_project_unauthorized_a.to_reference(project)} "
- click_button 'Add'
+ page.within('.linked-issues-card-body') do
+ click_button 'Add'
+ end
wait_for_requests
diff --git a/spec/features/issues/todo_spec.rb b/spec/features/issues/todo_spec.rb
index d63d21353e5..6a53c12eda3 100644
--- a/spec/features/issues/todo_spec.rb
+++ b/spec/features/issues/todo_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe 'Manually create a todo item from issue', :js do
let!(:project) { create(:project) }
let!(:issue) { create(:issue, project: project) }
- let!(:user) { create(:user)}
+ let!(:user) { create(:user) }
before do
project.add_maintainer(user)
diff --git a/spec/features/issues/user_bulk_edits_issues_spec.rb b/spec/features/issues/user_bulk_edits_issues_spec.rb
index 0533f1688e2..1ef2918adec 100644
--- a/spec/features/issues/user_bulk_edits_issues_spec.rb
+++ b/spec/features/issues/user_bulk_edits_issues_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe 'Multiple issue updating from issues#index', :js do
let!(:project) { create(:project) }
let!(:issue) { create(:issue, project: project) }
- let!(:user) { create(:user)}
+ let!(:user) { create(:user) }
before do
project.add_maintainer(user)
diff --git a/spec/features/issues/user_creates_issue_spec.rb b/spec/features/issues/user_creates_issue_spec.rb
index 151d3c60fa2..e29911e3263 100644
--- a/spec/features/issues/user_creates_issue_spec.rb
+++ b/spec/features/issues/user_creates_issue_spec.rb
@@ -151,7 +151,7 @@ RSpec.describe "User creates issue" do
click_button 'Cancel'
end
- expect(page).to have_button('Attach a file')
+ expect(page).to have_selector('[data-testid="button-attach-file"]')
expect(page).not_to have_button('Cancel')
expect(page).not_to have_selector('.uploading-progress-container', visible: true)
end
@@ -188,7 +188,7 @@ RSpec.describe "User creates issue" do
end
it 'does not hide the milestone select' do
- expect(page).to have_selector('.qa-issuable-milestone-dropdown') # rubocop:disable QA/SelectorUsage
+ expect(page).to have_selector('[data-testid="issuable-milestone-dropdown"]')
end
end
@@ -204,7 +204,7 @@ RSpec.describe "User creates issue" do
end
it 'shows the milestone select' do
- expect(page).to have_selector('.qa-issuable-milestone-dropdown') # rubocop:disable QA/SelectorUsage
+ expect(page).to have_selector('[data-testid="issuable-milestone-dropdown"]')
end
it 'hides the incident help text' do
@@ -265,7 +265,7 @@ RSpec.describe "User creates issue" do
end
it 'shows the milestone select' do
- expect(page).to have_selector('.qa-issuable-milestone-dropdown') # rubocop:disable QA/SelectorUsage
+ expect(page).to have_selector('[data-testid="issuable-milestone-dropdown"]')
end
it 'hides the weight input' do
diff --git a/spec/features/issues/user_edits_issue_spec.rb b/spec/features/issues/user_edits_issue_spec.rb
index 3b440002cb5..4eecb63c47e 100644
--- a/spec/features/issues/user_edits_issue_spec.rb
+++ b/spec/features/issues/user_edits_issue_spec.rb
@@ -155,7 +155,7 @@ RSpec.describe "Issues > User edits issue", :js do
page.within '.block.labels' do
# Remove `verisimilitude` label
- within '.gl-label' do
+ within '.gl-label', text: 'verisimilitude' do
click_button 'Remove label'
end
@@ -285,7 +285,7 @@ RSpec.describe "Issues > User edits issue", :js do
end
page.within '.dropdown-menu-user' do
- click_link user.name
+ click_button user.name
end
page.within('.assignee') do
@@ -306,7 +306,7 @@ RSpec.describe "Issues > User edits issue", :js do
click_button('Edit')
wait_for_requests
- click_link user.name
+ click_button user.name
find('[data-testid="title"]').click
wait_for_requests
diff --git a/spec/features/issues/user_interacts_with_awards_spec.rb b/spec/features/issues/user_interacts_with_awards_spec.rb
index 892b57bac5c..c86a2c32e2d 100644
--- a/spec/features/issues/user_interacts_with_awards_spec.rb
+++ b/spec/features/issues/user_interacts_with_awards_spec.rb
@@ -6,7 +6,7 @@ RSpec.describe 'User interacts with awards' do
let(:user) { create(:user) }
describe 'User interacts with awards in an issue', :js do
- let(:issue) { create(:issue, project: project)}
+ let(:issue) { create(:issue, project: project) }
let(:project) { create(:project) }
before do
diff --git a/spec/features/issues/user_uses_quick_actions_spec.rb b/spec/features/issues/user_uses_quick_actions_spec.rb
index c6d743ed38f..d458c991668 100644
--- a/spec/features/issues/user_uses_quick_actions_spec.rb
+++ b/spec/features/issues/user_uses_quick_actions_spec.rb
@@ -18,7 +18,7 @@ RSpec.describe 'Issues > User uses quick actions', :js do
let!(:label_feature) { create(:label, project: project, title: 'feature') }
let!(:milestone) { create(:milestone, project: project, title: 'ASAP') }
let(:issuable) { create(:issue, project: project) }
- let(:source_issuable) { create(:issue, project: project, milestone: milestone, labels: [label_bug, label_feature])}
+ let(:source_issuable) { create(:issue, project: project, milestone: milestone, labels: [label_bug, label_feature]) }
it_behaves_like 'close quick action', :issue
it_behaves_like 'issuable time tracker', :issue
diff --git a/spec/features/markdown/copy_as_gfm_spec.rb b/spec/features/markdown/copy_as_gfm_spec.rb
index d472134a2c7..b5bf9279371 100644
--- a/spec/features/markdown/copy_as_gfm_spec.rb
+++ b/spec/features/markdown/copy_as_gfm_spec.rb
@@ -109,10 +109,24 @@ RSpec.describe 'Copy as GFM', :js do
<<~GFM,
* [ ] Unchecked task
* [x] Checked task
+ * [~] Inapplicable task
+ * [~] Inapplicable task with ~~del~~ and <s>strike</s> embedded
GFM
- <<~GFM
+ <<~GFM,
1. [ ] Unchecked ordered task
1. [x] Checked ordered task
+ 1. [~] Inapplicable ordered task
+ 1. [~] Inapplicable ordered task with ~~del~~ and <s>strike</s> embedded
+ GFM
+ <<~GFM
+ * [ ] Unchecked loose list task
+ * [x] Checked loose list task
+ * [~] Inapplicable loose list task
+
+ With a paragraph
+ * [~] Inapplicable loose list task with ~~del~~ and <s>strike</s> embedded
+
+ With a paragraph
GFM
)
@@ -605,7 +619,8 @@ RSpec.describe 'Copy as GFM', :js do
'###### Heading',
'**Bold**',
'*Italics*',
- '~~Strikethrough~~',
+ '~~Strikethrough (del)~~',
+ '<s>Strikethrough</s>',
'---',
# table
<<~GFM,
diff --git a/spec/features/markdown/gitlab_flavored_markdown_spec.rb b/spec/features/markdown/gitlab_flavored_markdown_spec.rb
index da4208318eb..e831d1be608 100644
--- a/spec/features/markdown/gitlab_flavored_markdown_spec.rb
+++ b/spec/features/markdown/gitlab_flavored_markdown_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
RSpec.describe "GitLab Flavored Markdown" do
+ include CycleAnalyticsHelpers
+
let(:user) { create(:user) }
let(:project) { create(:project) }
let(:issue) { create(:issue, project: project) }
diff --git a/spec/features/markdown/json_table_spec.rb b/spec/features/markdown/json_table_spec.rb
new file mode 100644
index 00000000000..6b74dbac255
--- /dev/null
+++ b/spec/features/markdown/json_table_spec.rb
@@ -0,0 +1,40 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Rendering json:table code block in markdown', :js do
+ let_it_be(:project) { create(:project, :public) }
+
+ it 'creates table correctly' do
+ description = <<~JSONTABLE
+ Hello world!
+
+ ```json:table
+ {
+ "fields" : [
+ {"key": "a", "label": "AA"},
+ {"key": "b", "label": "BB"}
+ ],
+ "items" : [
+ {"a": "11", "b": "22"},
+ {"a": "211", "b": "222"}
+ ]
+ }
+ ```
+ JSONTABLE
+
+ issue = create(:issue, project: project, description: description)
+
+ visit project_issue_path(project, issue)
+
+ wait_for_requests
+
+ within ".js-json-table table" do
+ headers = all("thead th").collect { |column| column.text.strip }
+ data = all("tbody td").collect { |column| column.text.strip }
+
+ expect(headers).to eql(%w[AA BB])
+ expect(data).to eql(%w[11 22 211 222])
+ end
+ end
+end
diff --git a/spec/features/merge_request/batch_comments_spec.rb b/spec/features/merge_request/batch_comments_spec.rb
index fafaea8ac68..f892b01e624 100644
--- a/spec/features/merge_request/batch_comments_spec.rb
+++ b/spec/features/merge_request/batch_comments_spec.rb
@@ -101,7 +101,7 @@ RSpec.describe 'Merge request > Batch comments', :js do
write_diff_comment
- visit_overview
+ visit_overview_with_pending_comment
end
it 'can add comment to review' do
@@ -232,6 +232,14 @@ RSpec.describe 'Merge request > Batch comments', :js do
wait_for_requests
end
+ def visit_overview_with_pending_comment
+ accept_alert do
+ visit project_merge_request_path(merge_request.project, merge_request)
+ end
+
+ wait_for_requests
+ end
+
def write_diff_comment(**params)
click_diff_line(find_by_scrolling("[id='#{sample_compare.changes[0][:line_code]}']"))
diff --git a/spec/features/merge_request/maintainer_edits_fork_spec.rb b/spec/features/merge_request/maintainer_edits_fork_spec.rb
index a98bfd1c8a4..39d948bb6fb 100644
--- a/spec/features/merge_request/maintainer_edits_fork_spec.rb
+++ b/spec/features/merge_request/maintainer_edits_fork_spec.rb
@@ -3,6 +3,7 @@
require 'spec_helper'
RSpec.describe 'a maintainer edits files on a source-branch of an MR from a fork', :js, :sidekiq_might_not_need_inline do
+ include Spec::Support::Helpers::Features::SourceEditorSpecHelpers
include ProjectForksHelper
let(:user) { create(:user, username: 'the-maintainer') }
let(:target_project) { create(:project, :public, :repository) }
@@ -40,12 +41,13 @@ RSpec.describe 'a maintainer edits files on a source-branch of an MR from a fork
end
it 'allows committing to the source branch' do
- execute_script("monaco.editor.getModels()[0].setValue('Updated the readme')")
+ content = 'Updated the readme'
+ editor_set_value(content)
click_button 'Commit changes'
wait_for_requests
expect(page).to have_content('Your changes have been successfully committed')
- expect(page).to have_content('Updated the readme')
+ expect(page).to have_content(content)
end
end
diff --git a/spec/features/merge_request/user_approves_spec.rb b/spec/features/merge_request/user_approves_spec.rb
index 4f7bcb58551..9670012803e 100644
--- a/spec/features/merge_request/user_approves_spec.rb
+++ b/spec/features/merge_request/user_approves_spec.rb
@@ -27,7 +27,7 @@ RSpec.describe 'Merge request > User approves', :js do
def verify_approvals_count_on_index!
visit(project_merge_requests_path(project, state: :all))
- expect(page.all('li').any? { |item| item["title"] == "1 approver (you've approved)"}).to be true
+ expect(page.all('li').any? { |item| item["title"] == "1 approver (you've approved)" }).to be true
visit project_merge_request_path(project, merge_request)
end
diff --git a/spec/features/merge_request/user_comments_on_merge_request_spec.rb b/spec/features/merge_request/user_comments_on_merge_request_spec.rb
index 43096f8e7f9..dbcfc2b968f 100644
--- a/spec/features/merge_request/user_comments_on_merge_request_spec.rb
+++ b/spec/features/merge_request/user_comments_on_merge_request_spec.rb
@@ -51,6 +51,45 @@ RSpec.describe 'User comments on a merge request', :js do
expect(page).to have_button('Resolve thread')
end
+ array = [':', '@', '#', '%', '!', '~', '$', '[contact:']
+ array.each do |x|
+ it 'handles esc key correctly when atwho is active' do
+ page.within('.js-main-target-form') do
+ fill_in('note[note]', with: 'comment 1')
+ click_button('Comment')
+ end
+
+ wait_for_requests
+
+ page.within('.note') do
+ click_button('Reply to comment')
+ fill_in('note[note]', with: x)
+ send_keys :escape
+ end
+
+ wait_for_requests
+ expect(page.html).not_to include('Are you sure you want to cancel creating this comment?')
+ end
+ end
+
+ it 'handles esc key correctly when atwho is not active' do
+ page.within('.js-main-target-form') do
+ fill_in('note[note]', with: 'comment 1')
+ click_button('Comment')
+ end
+
+ wait_for_requests
+
+ page.within('.note') do
+ click_button('Reply to comment')
+ fill_in('note[note]', with: 'comment 2')
+ send_keys :escape
+ end
+
+ wait_for_requests
+ expect(page.html).to include('Are you sure you want to cancel creating this comment?')
+ end
+
it 'loads new comment' do
# Add new comment in background in order to check
# if it's going to be loaded automatically for current user.
diff --git a/spec/features/merge_request/user_customizes_merge_commit_message_spec.rb b/spec/features/merge_request/user_customizes_merge_commit_message_spec.rb
index 059e1eb89c5..f0c0142a6cc 100644
--- a/spec/features/merge_request/user_customizes_merge_commit_message_spec.rb
+++ b/spec/features/merge_request/user_customizes_merge_commit_message_spec.rb
@@ -5,8 +5,8 @@ require 'spec_helper'
RSpec.describe 'Merge request < User customizes merge commit message', :js do
let(:project) { create(:project, :public, :repository) }
let(:user) { project.creator }
- let(:issue_1) { create(:issue, project: project)}
- let(:issue_2) { create(:issue, project: project)}
+ let(:issue_1) { create(:issue, project: project) }
+ let(:issue_2) { create(:issue, project: project) }
let(:source_branch) { 'csv' }
let(:target_branch) { 'master' }
let(:squash) { false }
diff --git a/spec/features/merge_request/user_edits_assignees_sidebar_spec.rb b/spec/features/merge_request/user_edits_assignees_sidebar_spec.rb
index 92b9b785148..0dd87ac3e24 100644
--- a/spec/features/merge_request/user_edits_assignees_sidebar_spec.rb
+++ b/spec/features/merge_request/user_edits_assignees_sidebar_spec.rb
@@ -89,7 +89,7 @@ RSpec.describe 'Merge request > User edits assignees sidebar', :js do
context 'when GraphQL assignees widget feature flag is enabled' do
let(:sidebar_assignee_dropdown_item) { sidebar_assignee_block.find(".dropdown-item", text: assignee.username ) }
- let(:sidebar_assignee_dropdown_tooltip) { sidebar_assignee_dropdown_item['title']}
+ let(:sidebar_assignee_dropdown_tooltip) { sidebar_assignee_dropdown_item['title'] }
context 'when user is an owner' do
before do
diff --git a/spec/features/merge_request/user_merges_merge_request_spec.rb b/spec/features/merge_request/user_merges_merge_request_spec.rb
index 6a9a30953df..c91dc7b1c00 100644
--- a/spec/features/merge_request/user_merges_merge_request_spec.rb
+++ b/spec/features/merge_request/user_merges_merge_request_spec.rb
@@ -21,27 +21,6 @@ RSpec.describe "User merges a merge request", :js do
end
end
- context "ff-only merge" do
- let(:project) { create(:project, :public, :repository, merge_requests_ff_only_enabled: true) }
-
- before do
- stub_feature_flags(restructured_mr_widget: false)
- visit(merge_request_path(merge_request))
- end
-
- context "when branch is rebased" do
- let!(:merge_request) { create(:merge_request, :rebased, source_project: project) }
-
- it_behaves_like "fast forward merge a merge request"
- end
-
- context "when branch is merged" do
- let!(:merge_request) { create(:merge_request, :merged_target, source_project: project) }
-
- it_behaves_like "fast forward merge a merge request"
- end
- end
-
context 'sidebar merge requests counter' do
let(:project) { create(:project, :public, :repository) }
let!(:merge_request) { create(:merge_request, source_project: project) }
diff --git a/spec/features/merge_request/user_opens_context_commits_modal_spec.rb b/spec/features/merge_request/user_opens_context_commits_modal_spec.rb
new file mode 100644
index 00000000000..2d574e57fe9
--- /dev/null
+++ b/spec/features/merge_request/user_opens_context_commits_modal_spec.rb
@@ -0,0 +1,26 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Merge request > Context commits', :js do
+ let(:user) { create(:user) }
+ let(:project) { create(:project, :public, :repository) }
+ let(:merge_request) { create(:merge_request, source_project: project) }
+
+ before do
+ project.add_developer(user)
+
+ sign_in(user)
+
+ visit commits_project_merge_request_path(project, merge_request)
+
+ wait_for_requests
+ end
+
+ it 'opens modal' do
+ click_button 'Add previously merged commits'
+
+ expect(page).to have_selector('#add-review-item')
+ expect(page).to have_content('Add or remove previously merged commits')
+ end
+end
diff --git a/spec/features/merge_request/user_posts_diff_notes_spec.rb b/spec/features/merge_request/user_posts_diff_notes_spec.rb
index d461170c990..1eebb6c2e28 100644
--- a/spec/features/merge_request/user_posts_diff_notes_spec.rb
+++ b/spec/features/merge_request/user_posts_diff_notes_spec.rb
@@ -19,7 +19,6 @@ RSpec.describe 'Merge request > User posts diff notes', :js do
project.add_developer(user)
sign_in(user)
- stub_const('Gitlab::QueryLimiting::Transaction::THRESHOLD', 104)
end
context 'when hovering over a parallel view diff file' do
diff --git a/spec/features/merge_request/user_sees_closing_issues_message_spec.rb b/spec/features/merge_request/user_sees_closing_issues_message_spec.rb
index 7b7fff5c936..f56db3d3dbe 100644
--- a/spec/features/merge_request/user_sees_closing_issues_message_spec.rb
+++ b/spec/features/merge_request/user_sees_closing_issues_message_spec.rb
@@ -5,8 +5,8 @@ require 'spec_helper'
RSpec.describe 'Merge request > User sees closing issues message', :js do
let(:project) { create(:project, :public, :repository) }
let(:user) { project.creator }
- let(:issue_1) { create(:issue, project: project)}
- let(:issue_2) { create(:issue, project: project)}
+ let(:issue_1) { create(:issue, project: project) }
+ let(:issue_2) { create(:issue, project: project) }
let(:merge_request) do
create(
:merge_request,
diff --git a/spec/features/merge_request/user_sees_deployment_widget_spec.rb b/spec/features/merge_request/user_sees_deployment_widget_spec.rb
index e045f11c0d8..c02149eed87 100644
--- a/spec/features/merge_request/user_sees_deployment_widget_spec.rb
+++ b/spec/features/merge_request/user_sees_deployment_widget_spec.rb
@@ -14,7 +14,7 @@ RSpec.describe 'Merge request > User sees deployment widget', :js do
let(:ref) { merge_request.target_branch }
let(:sha) { project.commit(ref).id }
let(:pipeline) { create(:ci_pipeline, sha: sha, project: project, ref: ref) }
- let!(:manual) { }
+ let!(:manual) {}
let(:build) { create(:ci_build, :with_deployment, environment: environment.name, pipeline: pipeline) }
let!(:deployment) { build.deployment }
diff --git a/spec/features/merge_request/user_sees_diff_spec.rb b/spec/features/merge_request/user_sees_diff_spec.rb
index 50f4cce5c23..2e65183d26f 100644
--- a/spec/features/merge_request/user_sees_diff_spec.rb
+++ b/spec/features/merge_request/user_sees_diff_spec.rb
@@ -86,7 +86,7 @@ RSpec.describe 'Merge request > User sees diff', :js do
context 'when file contains html' do
let(:current_user) { project.first_owner }
- let(:branch_name) {"test_branch"}
+ let(:branch_name) { "test_branch" }
it 'escapes any HTML special characters in the diff chunk header' do
file_content =
diff --git a/spec/features/merge_request/user_sees_merge_request_pipelines_spec.rb b/spec/features/merge_request/user_sees_merge_request_pipelines_spec.rb
index 09c6b6bce3b..2a1b9ea6009 100644
--- a/spec/features/merge_request/user_sees_merge_request_pipelines_spec.rb
+++ b/spec/features/merge_request/user_sees_merge_request_pipelines_spec.rb
@@ -25,7 +25,7 @@ RSpec.describe 'Merge request > User sees pipelines triggered by merge request',
}
end
- let(:expected_detached_mr_tag) {'merge request'}
+ let(:expected_detached_mr_tag) { 'merge request' }
before do
stub_application_setting(auto_devops_enabled: false)
diff --git a/spec/features/merge_request/user_sees_pipelines_spec.rb b/spec/features/merge_request/user_sees_pipelines_spec.rb
index 16b1de0393f..11e542916f9 100644
--- a/spec/features/merge_request/user_sees_pipelines_spec.rb
+++ b/spec/features/merge_request/user_sees_pipelines_spec.rb
@@ -78,9 +78,18 @@ RSpec.describe 'Merge request > User sees pipelines', :js do
it 'user visits merge request page' do
page.within('.merge-request-tabs') do
- expect(page).to have_no_link('Pipelines')
+ expect(page).to have_link('Pipelines')
end
end
+
+ it 'shows empty state with run pipeline button' do
+ page.within('.merge-request-tabs') do
+ click_link('Pipelines')
+ end
+
+ expect(page).to have_content('There are currently no pipelines.')
+ expect(page.find('[data-testid="run_pipeline_button"]')).to have_text('Run pipeline')
+ end
end
end
diff --git a/spec/features/merge_request/user_sees_versions_spec.rb b/spec/features/merge_request/user_sees_versions_spec.rb
index 2c2a2dfd4a8..0e86e970f46 100644
--- a/spec/features/merge_request/user_sees_versions_spec.rb
+++ b/spec/features/merge_request/user_sees_versions_spec.rb
@@ -232,7 +232,7 @@ RSpec.describe 'Merge request > User sees versions', :js do
end
it 'only shows diffs from the commit' do
- diff_commit_ids = find_all('.diff-file [data-commit-id]').map {|diff| diff['data-commit-id']}
+ diff_commit_ids = find_all('.diff-file [data-commit-id]').map { |diff| diff['data-commit-id'] }
expect(diff_commit_ids).not_to be_empty
expect(diff_commit_ids).to all(eq(params[:commit_id]))
diff --git a/spec/features/merge_request/user_uses_quick_actions_spec.rb b/spec/features/merge_request/user_uses_quick_actions_spec.rb
index b48659353ec..563120fc8b7 100644
--- a/spec/features/merge_request/user_uses_quick_actions_spec.rb
+++ b/spec/features/merge_request/user_uses_quick_actions_spec.rb
@@ -24,7 +24,7 @@ RSpec.describe 'Merge request > User uses quick actions', :js do
let!(:label_feature) { create(:label, project: project, title: 'feature') }
let!(:milestone) { create(:milestone, project: project, title: 'ASAP') }
let(:issuable) { create(:merge_request, source_project: project) }
- let(:source_issuable) { create(:issue, project: project, milestone: milestone, labels: [label_bug, label_feature])}
+ let(:source_issuable) { create(:issue, project: project, milestone: milestone, labels: [label_bug, label_feature]) }
it_behaves_like 'close quick action', :merge_request
it_behaves_like 'issuable time tracker', :merge_request
diff --git a/spec/features/merge_requests/user_mass_updates_spec.rb b/spec/features/merge_requests/user_mass_updates_spec.rb
index fa866beb773..cf9760bcd7f 100644
--- a/spec/features/merge_requests/user_mass_updates_spec.rb
+++ b/spec/features/merge_requests/user_mass_updates_spec.rb
@@ -9,8 +9,6 @@ RSpec.describe 'Merge requests > User mass updates', :js do
let!(:merge_request) { create(:merge_request, source_project: project, target_project: project) }
before do
- stub_feature_flags(mr_attention_requests: false)
-
project.add_maintainer(user)
project.add_maintainer(user2)
sign_in(user)
@@ -63,18 +61,6 @@ RSpec.describe 'Merge requests > User mass updates', :js do
expect(find('.merge-request')).to have_link "Assigned to #{user.name}"
end
-
- describe 'with attention requests feature flag on' do
- before do
- stub_feature_flags(mr_attention_requests: true)
- end
-
- it 'updates merge request with assignee' do
- change_assignee(user2.name)
-
- expect(find('.issuable-meta a.author-link')[:title]).to eq "Attention requested from assignee #{user2.name}"
- end
- end
end
describe 'remove assignee' do
diff --git a/spec/features/oauth_registration_spec.rb b/spec/features/oauth_registration_spec.rb
index 18dd10755b1..cb8343b8065 100644
--- a/spec/features/oauth_registration_spec.rb
+++ b/spec/features/oauth_registration_spec.rb
@@ -85,7 +85,46 @@ RSpec.describe 'OAuth Registration', :js, :allow_forgery_protection do
expect(page).to have_content('Please complete your profile with email address')
end
end
+
+ context 'when registering via an invitation email' do
+ let_it_be(:owner) { create(:user) }
+ let_it_be(:group) { create(:group, name: 'Owned') }
+ let_it_be(:project) { create(:project, :repository, namespace: group) }
+
+ let(:invite_email) { generate(:email) }
+ let(:extra_params) { { invite_type: Emails::Members::INITIAL_INVITE } }
+ let(:group_invite) do
+ create(
+ :group_member, :invited,
+ group: group,
+ invite_email: invite_email,
+ created_by: owner
+ )
+ end
+
+ before do
+ project.add_maintainer(owner)
+ group.add_owner(owner)
+ group_invite.generate_invite_token!
+
+ mock_auth_hash(provider, uid, invite_email, additional_info: additional_info)
+ end
+
+ it 'redirects to the activity page with all the projects/groups invitations accepted' do
+ visit invite_path(group_invite.raw_invite_token, extra_params)
+ click_link_or_button "oauth-login-#{provider}"
+ fill_in_welcome_form
+
+ expect(page).to have_content('You have been granted Owner access to group Owned.')
+ expect(page).to have_current_path(activity_group_path(group), ignore_query: true)
+ end
+ end
end
end
end
+
+ def fill_in_welcome_form
+ select 'Software Developer', from: 'user_role'
+ click_button 'Get started!'
+ end
end
diff --git a/spec/features/populate_new_pipeline_vars_with_params_spec.rb b/spec/features/populate_new_pipeline_vars_with_params_spec.rb
index 937f99558ad..744543d1252 100644
--- a/spec/features/populate_new_pipeline_vars_with_params_spec.rb
+++ b/spec/features/populate_new_pipeline_vars_with_params_spec.rb
@@ -16,7 +16,6 @@ RSpec.describe "Populate new pipeline CI variables with url params", :js do
it "var[key1]=value1 populates env_var variable correctly" do
page.within(all("[data-testid='ci-variable-row']")[0]) do
- expect(find("[data-testid='pipeline-form-ci-variable-type']").value).to eq('env_var')
expect(find("[data-testid='pipeline-form-ci-variable-key']").value).to eq('key1')
expect(find("[data-testid='pipeline-form-ci-variable-value']").value).to eq('value1')
end
@@ -24,7 +23,6 @@ RSpec.describe "Populate new pipeline CI variables with url params", :js do
it "file_var[key2]=value2 populates file variable correctly" do
page.within(all("[data-testid='ci-variable-row']")[1]) do
- expect(find("[data-testid='pipeline-form-ci-variable-type']").value).to eq('file')
expect(find("[data-testid='pipeline-form-ci-variable-key']").value).to eq('key2')
expect(find("[data-testid='pipeline-form-ci-variable-value']").value).to eq('value2')
end
diff --git a/spec/features/profile_spec.rb b/spec/features/profile_spec.rb
index 1013937ebb9..2836ac2f801 100644
--- a/spec/features/profile_spec.rb
+++ b/spec/features/profile_spec.rb
@@ -30,7 +30,7 @@ RSpec.describe 'Profile account page', :js do
it 'deletes user', :js, :sidekiq_might_not_need_inline do
click_button 'Delete account'
- fill_in 'password', with: '12345678'
+ fill_in 'password', with: user.password
page.within '.modal' do
click_button 'Delete account'
diff --git a/spec/features/profiles/password_spec.rb b/spec/features/profiles/password_spec.rb
index 07dfbca8cbd..1d0db488751 100644
--- a/spec/features/profiles/password_spec.rb
+++ b/spec/features/profiles/password_spec.rb
@@ -13,6 +13,7 @@ RSpec.describe 'Profile > Password' do
end
context 'Password authentication enabled' do
+ let(:new_password) { User.random_password }
let(:user) { create(:user, password_automatically_set: true) }
before do
@@ -23,7 +24,7 @@ RSpec.describe 'Profile > Password' do
context 'User with password automatically set' do
describe 'User puts different passwords in the field and in the confirmation' do
it 'shows an error message' do
- fill_passwords('mypassword', 'mypassword2')
+ fill_passwords(new_password, "#{new_password}2")
page.within('.gl-alert-danger') do
expect(page).to have_content("Password confirmation doesn't match Password")
@@ -31,7 +32,7 @@ RSpec.describe 'Profile > Password' do
end
it 'does not contain the current password field after an error' do
- fill_passwords('mypassword', 'mypassword2')
+ fill_passwords(new_password, "#{new_password}2")
expect(page).to have_no_field('user[current_password]')
end
@@ -39,7 +40,7 @@ RSpec.describe 'Profile > Password' do
describe 'User puts the same passwords in the field and in the confirmation' do
it 'shows a success message' do
- fill_passwords('mypassword', 'mypassword')
+ fill_passwords(new_password, new_password)
page.within('[data-testid="alert-info"]') do
expect(page).to have_content('Password was successfully updated. Please sign in again.')
@@ -79,7 +80,7 @@ RSpec.describe 'Profile > Password' do
end
context 'Change password' do
- let(:new_password) { '22233344' }
+ let(:new_password) { User.random_password }
before do
sign_in(user)
@@ -156,6 +157,8 @@ RSpec.describe 'Profile > Password' do
end
context 'when password is expired' do
+ let(:new_password) { User.random_password }
+
before do
sign_in(user)
@@ -170,8 +173,8 @@ RSpec.describe 'Profile > Password' do
expect(page).to have_current_path new_profile_password_path, ignore_query: true
fill_in :user_password, with: user.password
- fill_in :user_new_password, with: '12345678'
- fill_in :user_password_confirmation, with: '12345678'
+ fill_in :user_new_password, with: new_password
+ fill_in :user_password_confirmation, with: new_password
click_button 'Set new password'
expect(page).to have_current_path new_user_session_path, ignore_query: true
diff --git a/spec/features/profiles/personal_access_tokens_spec.rb b/spec/features/profiles/personal_access_tokens_spec.rb
index bca1bc4df4d..088c8a7a15a 100644
--- a/spec/features/profiles/personal_access_tokens_spec.rb
+++ b/spec/features/profiles/personal_access_tokens_spec.rb
@@ -146,12 +146,6 @@ RSpec.describe 'Profile > Personal Access Tokens', :js do
end
end
- it 'pushes `personal_access_tokens_scoped_to_projects` feature flag to the frontend' do
- visit profile_personal_access_tokens_path
-
- expect(page).to have_pushed_frontend_feature_flags(personalAccessTokensScopedToProjects: true)
- end
-
it "prefills token details" do
name = 'My PAT'
scopes = 'api,read_user'
diff --git a/spec/features/profiles/user_edit_profile_spec.rb b/spec/features/profiles/user_edit_profile_spec.rb
index 4b6ed458c68..2f7b722f553 100644
--- a/spec/features/profiles/user_edit_profile_spec.rb
+++ b/spec/features/profiles/user_edit_profile_spec.rb
@@ -294,7 +294,7 @@ RSpec.describe 'User edit profile' do
end
context 'user menu' do
- let(:issue) { create(:issue, project: project)}
+ let(:issue) { create(:issue, project: project) }
let(:project) { create(:project) }
def open_modal(button_text)
@@ -536,7 +536,7 @@ RSpec.describe 'User edit profile' do
end
context 'User time preferences', :js do
- let(:issue) { create(:issue, project: project)}
+ let(:issue) { create(:issue, project: project) }
let(:project) { create(:project) }
before do
diff --git a/spec/features/profiles/user_visits_profile_spec.rb b/spec/features/profiles/user_visits_profile_spec.rb
index 8b1af283765..7dd2e6aafa3 100644
--- a/spec/features/profiles/user_visits_profile_spec.rb
+++ b/spec/features/profiles/user_visits_profile_spec.rb
@@ -97,8 +97,8 @@ RSpec.describe 'User visits their profile' do
let_it_be(:storage_enforcement_date) { Date.today + 30 }
before do
- allow_next_found_instance_of(Namespaces::UserNamespace) do |g|
- allow(g).to receive(:storage_enforcement_date).and_return(storage_enforcement_date)
+ allow_next_found_instance_of(Namespaces::UserNamespace) do |user_namespace|
+ allow(user_namespace).to receive(:storage_enforcement_date).and_return(storage_enforcement_date)
end
end
@@ -115,8 +115,8 @@ RSpec.describe 'User visits their profile' do
expect_page_not_to_have_storage_enforcement_banner
storage_enforcement_date = Date.today + 13
- allow_next_found_instance_of(Namespaces::UserNamespace) do |g|
- allow(g).to receive(:storage_enforcement_date).and_return(storage_enforcement_date)
+ allow_next_found_instance_of(Namespaces::UserNamespace) do |user_namespace|
+ allow(user_namespace).to receive(:storage_enforcement_date).and_return(storage_enforcement_date)
end
page.refresh
expect_page_to_have_storage_enforcement_banner(storage_enforcement_date)
@@ -124,8 +124,12 @@ RSpec.describe 'User visits their profile' do
end
context 'with storage_enforcement_date not set' do
- # This test should break and be rewritten after the implementation of the storage_enforcement_date
- # TBD: https://gitlab.com/gitlab-org/gitlab/-/issues/350632
+ before do
+ allow_next_found_instance_of(Namespaces::UserNamespace) do |user_namespace|
+ allow(user_namespace).to receive(:storage_enforcement_date).and_return(nil)
+ end
+ end
+
it 'does not display the banner in the group page' do
visit(profile_path)
expect_page_not_to_have_storage_enforcement_banner
@@ -134,10 +138,10 @@ RSpec.describe 'User visits their profile' do
end
def expect_page_to_have_storage_enforcement_banner(storage_enforcement_date)
- expect(page).to have_text "From #{storage_enforcement_date} storage limits will apply to this namespace"
+ expect(page).to have_text "Effective #{storage_enforcement_date}, namespace storage limits will apply"
end
def expect_page_not_to_have_storage_enforcement_banner
- expect(page).not_to have_text "storage limits will apply to this namespace"
+ expect(page).not_to have_text "namespace storage limits will apply"
end
end
diff --git a/spec/features/projects/blobs/blob_show_spec.rb b/spec/features/projects/blobs/blob_show_spec.rb
index f5cafa2b2ec..13a4c1b5912 100644
--- a/spec/features/projects/blobs/blob_show_spec.rb
+++ b/spec/features/projects/blobs/blob_show_spec.rb
@@ -137,7 +137,7 @@ RSpec.describe 'File blob', :js do
context 'when ref switch' do
def switch_ref_to(ref_name)
- first('.qa-branches-select').click # rubocop:disable QA/SelectorUsage
+ first('[data-testid="branches-select"]').click
page.within '.project-refs-form' do
click_link ref_name
diff --git a/spec/features/projects/blobs/edit_spec.rb b/spec/features/projects/blobs/edit_spec.rb
index 54176378de8..f198a1f42e2 100644
--- a/spec/features/projects/blobs/edit_spec.rb
+++ b/spec/features/projects/blobs/edit_spec.rb
@@ -3,6 +3,7 @@
require 'spec_helper'
RSpec.describe 'Editing file blob', :js do
+ include Spec::Support::Helpers::Features::SourceEditorSpecHelpers
include TreeHelper
include BlobSpecHelpers
@@ -42,7 +43,7 @@ RSpec.describe 'Editing file blob', :js do
def fill_editor(content: 'class NextFeature\\nend\\n')
wait_for_requests
- execute_script("monaco.editor.getModels()[0].setValue('#{content}')")
+ editor_set_value(content)
end
context 'from MR diff' do
@@ -98,10 +99,8 @@ RSpec.describe 'Editing file blob', :js do
click_link 'Preview changes'
wait_for_requests
- old_line_count = page.all('.line_holder.old').size
new_line_count = page.all('.line_holder.new').size
- expect(old_line_count).to be > 0
expect(new_line_count).to be > 0
end
end
diff --git a/spec/features/projects/ci/lint_spec.rb b/spec/features/projects/ci/lint_spec.rb
index 7f10c6afcd5..608511ae5a5 100644
--- a/spec/features/projects/ci/lint_spec.rb
+++ b/spec/features/projects/ci/lint_spec.rb
@@ -16,16 +16,13 @@ RSpec.describe 'CI Lint', :js do
visit project_ci_lint_path(project)
editor_set_value(yaml_content)
-
- wait_for('YAML content') do
- find(content_selector).text.present?
- end
end
describe 'YAML parsing' do
shared_examples 'validates the YAML' do
before do
click_on 'Validate'
+ scroll_to(page.find('[data-testid="ci-lint-status"]'))
end
context 'YAML is correct' do
diff --git a/spec/features/projects/ci/secure_files_spec.rb b/spec/features/projects/ci/secure_files_spec.rb
deleted file mode 100644
index 412330eb5d6..00000000000
--- a/spec/features/projects/ci/secure_files_spec.rb
+++ /dev/null
@@ -1,61 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe 'Secure Files', :js do
- let(:project) { create(:project) }
- let(:user) { create(:user) }
-
- before do
- stub_feature_flags(ci_secure_files_read_only: false)
- project.add_maintainer(user)
- sign_in(user)
- end
-
- it 'user sees the Secure Files list component' do
- visit project_ci_secure_files_path(project)
- expect(page).to have_content('There are no secure files yet.')
- end
-
- it 'prompts the user to confirm before deleting a file' do
- file = create(:ci_secure_file, project: project)
-
- visit project_ci_secure_files_path(project)
-
- expect(page).to have_content(file.name)
-
- find('button.btn-danger').click
-
- expect(page).to have_content("Delete #{file.name}?")
-
- click_on('Delete secure file')
-
- visit project_ci_secure_files_path(project)
-
- expect(page).not_to have_content(file.name)
- end
-
- it 'displays an uploaded file in the file list' do
- visit project_ci_secure_files_path(project)
- expect(page).to have_content('There are no secure files yet.')
-
- page.attach_file('spec/fixtures/ci_secure_files/upload-keystore.jks') do
- click_button 'Upload File'
- end
-
- expect(page).to have_content('upload-keystore.jks')
- end
-
- it 'displays an error when a duplicate file upload is attempted' do
- create(:ci_secure_file, project: project, name: 'upload-keystore.jks')
- visit project_ci_secure_files_path(project)
-
- expect(page).to have_content('upload-keystore.jks')
-
- page.attach_file('spec/fixtures/ci_secure_files/upload-keystore.jks') do
- click_button 'Upload File'
- end
-
- expect(page).to have_content('A file with this name already exists.')
- end
-end
diff --git a/spec/features/projects/cluster_agents_spec.rb b/spec/features/projects/cluster_agents_spec.rb
index 5d931afe4a7..8c557a9c37a 100644
--- a/spec/features/projects/cluster_agents_spec.rb
+++ b/spec/features/projects/cluster_agents_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe 'ClusterAgents', :js do
- let_it_be(:token) { create(:cluster_agent_token, description: 'feature test token')}
+ let_it_be(:token) { create(:cluster_agent_token, description: 'feature test token') }
let(:agent) { token.agent }
let(:project) { agent.project }
diff --git a/spec/features/projects/commits/user_browses_commits_spec.rb b/spec/features/projects/commits/user_browses_commits_spec.rb
index 863fdbdadaa..2719316c5dc 100644
--- a/spec/features/projects/commits/user_browses_commits_spec.rb
+++ b/spec/features/projects/commits/user_browses_commits_spec.rb
@@ -150,7 +150,7 @@ RSpec.describe 'User browses commits' do
let(:ref) { project.repository.root_ref }
let(:newrev) { project.repository.commit('master').sha }
let(:short_newrev) { project.repository.commit('master').short_id }
- let(:message) { 'Glob characters'}
+ let(:message) { 'Glob characters' }
before do
create_file_in_repo(project, ref, ref, filename, 'Test file', commit_message: message)
diff --git a/spec/features/projects/compare_spec.rb b/spec/features/projects/compare_spec.rb
index bc3ef2af9b0..22b0f344606 100644
--- a/spec/features/projects/compare_spec.rb
+++ b/spec/features/projects/compare_spec.rb
@@ -113,7 +113,7 @@ RSpec.describe "Compare", :js do
click_button('Compare')
- page.within('.gl-alert') do
+ page.within('[data-testid="too-many-changes-alert"]') do
expect(page).to have_text("Too many changes to show. To preserve performance only 3 of 3+ files are displayed.")
end
end
diff --git a/spec/features/projects/environments/environment_spec.rb b/spec/features/projects/environments/environment_spec.rb
index 951b24eafac..a53e8beb555 100644
--- a/spec/features/projects/environments/environment_spec.rb
+++ b/spec/features/projects/environments/environment_spec.rb
@@ -18,10 +18,10 @@ RSpec.describe 'Environment' do
describe 'environment details page' do
let!(:environment) { create(:environment, project: project) }
- let!(:permissions) { }
- let!(:deployment) { }
- let!(:action) { }
- let!(:cluster) { }
+ let!(:permissions) {}
+ let!(:deployment) {}
+ let!(:action) {}
+ let!(:cluster) {}
context 'with auto-stop' do
let!(:environment) { create(:environment, :will_auto_stop, name: 'staging', project: project) }
diff --git a/spec/features/projects/files/project_owner_sees_link_to_create_license_file_in_empty_project_spec.rb b/spec/features/projects/files/project_owner_sees_link_to_create_license_file_in_empty_project_spec.rb
index 6b1e60db5b1..0ad44f31a52 100644
--- a/spec/features/projects/files/project_owner_sees_link_to_create_license_file_in_empty_project_spec.rb
+++ b/spec/features/projects/files/project_owner_sees_link_to_create_license_file_in_empty_project_spec.rb
@@ -18,7 +18,7 @@ RSpec.describe 'Projects > Files > Project owner sees a link to create a license
expect(page).to have_current_path("/-/ide/project/#{project.full_path}/edit/master/-/LICENSE", ignore_query: true)
- expect(page).to have_selector('.qa-file-templates-bar') # rubocop:disable QA/SelectorUsage
+ expect(page).to have_selector('[data-testid="file-templates-bar"]')
select_template('MIT License')
diff --git a/spec/features/projects/files/user_browses_files_spec.rb b/spec/features/projects/files/user_browses_files_spec.rb
index 53fdd5a15dd..0f3ce5a2bad 100644
--- a/spec/features/projects/files/user_browses_files_spec.rb
+++ b/spec/features/projects/files/user_browses_files_spec.rb
@@ -348,7 +348,7 @@ RSpec.describe "User browses files", :js do
end
it "shows raw file content in a new tab" do
- new_tab = window_opened_by {click_link 'Open raw'}
+ new_tab = window_opened_by { click_link 'Open raw' }
within_window new_tab do
expect(page).to have_content("Test file")
@@ -366,7 +366,7 @@ RSpec.describe "User browses files", :js do
end
it "shows raw file content in a new tab" do
- new_tab = window_opened_by {click_link 'Open raw'}
+ new_tab = window_opened_by { click_link 'Open raw' }
within_window new_tab do
expect(page).to have_content("*.rbc")
diff --git a/spec/features/projects/files/user_creates_files_spec.rb b/spec/features/projects/files/user_creates_files_spec.rb
index 7344c91b6dc..a81f31d663e 100644
--- a/spec/features/projects/files/user_creates_files_spec.rb
+++ b/spec/features/projects/files/user_creates_files_spec.rb
@@ -3,6 +3,7 @@
require 'spec_helper'
RSpec.describe 'Projects > Files > User creates files', :js do
+ include Spec::Support::Helpers::Features::SourceEditorSpecHelpers
include BlobSpecHelpers
let(:fork_message) do
@@ -89,8 +90,7 @@ RSpec.describe 'Projects > Files > User creates files', :js do
end
it 'creates and commit a new file' do
- find('#editor')
- execute_script("monaco.editor.getModels()[0].setValue('*.rbca')")
+ editor_set_value('*.rbca')
fill_in(:file_name, with: 'not_a_file.md')
fill_in(:commit_message, with: 'New commit message', visible: true)
click_button('Commit changes')
@@ -107,8 +107,7 @@ RSpec.describe 'Projects > Files > User creates files', :js do
it 'creates and commit a new file with new lines at the end of file' do
set_default_button('edit')
- find('#editor')
- execute_script('monaco.editor.getModels()[0].setValue("Sample\n\n\n")')
+ editor_set_value('Sample\n\n\n')
fill_in(:file_name, with: 'not_a_file.md')
fill_in(:commit_message, with: 'New commit message', visible: true)
click_button('Commit changes')
@@ -119,8 +118,7 @@ RSpec.describe 'Projects > Files > User creates files', :js do
click_link('Edit')
- find('#editor')
- expect(evaluate_script('monaco.editor.getModels()[0].getValue()')).to eq("Sample\n\n\n")
+ expect(find('.monaco-editor')).to have_content('Sample\n\n\n')
end
it 'creates and commit a new file with a directory name' do
@@ -128,8 +126,7 @@ RSpec.describe 'Projects > Files > User creates files', :js do
expect(page).to have_selector('.file-editor')
- find('#editor')
- execute_script("monaco.editor.getModels()[0].setValue('*.rbca')")
+ editor_set_value('*.rbca')
fill_in(:commit_message, with: 'New commit message', visible: true)
click_button('Commit changes')
@@ -143,8 +140,7 @@ RSpec.describe 'Projects > Files > User creates files', :js do
it 'creates and commit a new file specifying a new branch' do
expect(page).to have_selector('.file-editor')
- find('#editor')
- execute_script("monaco.editor.getModels()[0].setValue('*.rbca')")
+ editor_set_value('*.rbca')
fill_in(:file_name, with: 'not_a_file.md')
fill_in(:commit_message, with: 'New commit message', visible: true)
fill_in(:branch_name, with: 'new_branch_name', visible: true)
@@ -178,8 +174,7 @@ RSpec.describe 'Projects > Files > User creates files', :js do
it 'creates and commit new file in forked project' do
expect(page).to have_selector('.file-editor')
- find('#editor')
- execute_script("monaco.editor.getModels()[0].setValue('*.rbca')")
+ editor_set_value('*.rbca')
fill_in(:file_name, with: 'not_a_file.md')
fill_in(:commit_message, with: 'New commit message', visible: true)
diff --git a/spec/features/projects/files/user_edits_files_spec.rb b/spec/features/projects/files/user_edits_files_spec.rb
index 1ac45970828..d7460538be9 100644
--- a/spec/features/projects/files/user_edits_files_spec.rb
+++ b/spec/features/projects/files/user_edits_files_spec.rb
@@ -3,6 +3,7 @@
require 'spec_helper'
RSpec.describe 'Projects > Files > User edits files', :js do
+ include Spec::Support::Helpers::Features::SourceEditorSpecHelpers
include ProjectForksHelper
include BlobSpecHelpers
@@ -50,10 +51,9 @@ RSpec.describe 'Projects > Files > User edits files', :js do
click_link_or_button('Edit')
find('.file-editor', match: :first)
- find('#editor')
- set_editor_value('*.rbca')
+ editor_set_value('*.rbca')
- expect(editor_value).to eq('*.rbca')
+ expect(find('.monaco-editor')).to have_content('*.rbca')
end
it 'does not show the edit link if a file is binary' do
@@ -72,8 +72,7 @@ RSpec.describe 'Projects > Files > User edits files', :js do
click_link_or_button('Edit')
find('.file-editor', match: :first)
- find('#editor')
- set_editor_value('*.rbca')
+ editor_set_value('*.rbca')
fill_in(:commit_message, with: 'New commit message', visible: true)
click_button('Commit changes')
@@ -91,8 +90,7 @@ RSpec.describe 'Projects > Files > User edits files', :js do
find('.file-editor', match: :first)
- find('#editor')
- set_editor_value('*.rbca')
+ editor_set_value('*.rbca')
fill_in(:commit_message, with: 'New commit message', visible: true)
fill_in(:branch_name, with: 'new_branch_name', visible: true)
click_button('Commit changes')
@@ -110,8 +108,7 @@ RSpec.describe 'Projects > Files > User edits files', :js do
click_link_or_button('Edit')
find('.file-editor', match: :first)
- find('#editor')
- set_editor_value('*.rbca')
+ editor_set_value('*.rbca')
click_link('Preview changes')
expect(page).to have_css('.line_holder.new')
@@ -156,10 +153,9 @@ RSpec.describe 'Projects > Files > User edits files', :js do
find('.file-editor', match: :first)
- find('#editor')
- set_editor_value('*.rbca')
+ editor_set_value('*.rbca')
- expect(editor_value).to eq('*.rbca')
+ expect(find('.monaco-editor')).to have_content('*.rbca')
end
it 'opens the Web IDE in a forked project', :sidekiq_might_not_need_inline do
@@ -187,8 +183,7 @@ RSpec.describe 'Projects > Files > User edits files', :js do
find('.file-editor', match: :first)
- find('#editor')
- set_editor_value('*.rbca')
+ editor_set_value('*.rbca')
fill_in(:commit_message, with: 'New commit message', visible: true)
click_button('Commit changes')
@@ -216,8 +211,7 @@ RSpec.describe 'Projects > Files > User edits files', :js do
expect(page).not_to have_link('Fork')
- find('#editor')
- set_editor_value('*.rbca')
+ editor_set_value('*.rbca')
fill_in(:commit_message, with: 'Another commit', visible: true)
click_button('Commit changes')
diff --git a/spec/features/projects/issuable_templates_spec.rb b/spec/features/projects/issuable_templates_spec.rb
index 12c5820a69d..ac83de3e765 100644
--- a/spec/features/projects/issuable_templates_spec.rb
+++ b/spec/features/projects/issuable_templates_spec.rb
@@ -90,6 +90,34 @@ RSpec.describe 'issuable templates', :js do
end
end
+ context 'user creates an issue with a default template from the repo' do
+ let(:template_content) { 'this is the default template' }
+
+ before do
+ project.repository.create_file(
+ user,
+ '.gitlab/issue_templates/default.md',
+ template_content,
+ message: 'added default issue template',
+ branch_name: 'master'
+ )
+ end
+
+ it 'does not overwrite autosaved description' do
+ visit new_project_issue_path project
+ wait_for_requests
+
+ assert_template # default template is loaded the first time
+
+ fill_in 'issue_description', with: 'my own description', fill_options: { clear: :backspace }
+
+ visit new_project_issue_path project
+ wait_for_requests
+
+ assert_template(expected_content: 'my own description')
+ end
+ end
+
context 'user creates a merge request using templates' do
let(:template_content) { 'this is a test "feature-proposal" template' }
let(:bug_template_content) { 'this is merge request bug template' }
diff --git a/spec/features/projects/jobs/user_browses_jobs_spec.rb b/spec/features/projects/jobs/user_browses_jobs_spec.rb
index bb44b70bb3a..289ab8cffa5 100644
--- a/spec/features/projects/jobs/user_browses_jobs_spec.rb
+++ b/spec/features/projects/jobs/user_browses_jobs_spec.rb
@@ -74,6 +74,7 @@ RSpec.describe 'User browses jobs' do
wait_for_requests
expect(page).to have_selector('.ci-canceled')
+ expect(page).not_to have_selector('[data-testid="jobs-table-error-alert"]')
end
end
diff --git a/spec/features/projects/members/manage_groups_spec.rb b/spec/features/projects/members/manage_groups_spec.rb
index 006fa3b6eff..e86affbbca1 100644
--- a/spec/features/projects/members/manage_groups_spec.rb
+++ b/spec/features/projects/members/manage_groups_spec.rb
@@ -162,7 +162,7 @@ RSpec.describe 'Project > Members > Manage groups', :js do
let_it_be(:user) { maintainer }
let_it_be(:group) { parent_group }
let_it_be(:group_within_hierarchy) { create(:group, parent: group) }
- let_it_be(:project_within_hierarchy) { create(:project, group: group_within_hierarchy)}
+ let_it_be(:project_within_hierarchy) { create(:project, group: group_within_hierarchy) }
let_it_be(:members_page_path) { project_project_members_path(project) }
let_it_be(:members_page_path_within_hierarchy) { project_project_members_path(project_within_hierarchy) }
end
diff --git a/spec/features/projects/members/manage_members_spec.rb b/spec/features/projects/members/manage_members_spec.rb
index 8d229530ef5..56eb02607a5 100644
--- a/spec/features/projects/members/manage_members_spec.rb
+++ b/spec/features/projects/members/manage_members_spec.rb
@@ -12,106 +12,188 @@ RSpec.describe 'Projects > Members > Manage members', :js do
let_it_be(:group) { create(:group) }
let_it_be(:project) { create(:project, :internal, namespace: group) }
+ let(:project_owner) { create(:user, name: "ProjectOwner", username: "project_owner") }
+ let(:project_maintainer) { create(:user, name: "ProjectMaintainer", username: "project_maintainer") }
+ let(:group_owner) { user1 }
+ let(:project_developer) { user2 }
+
before do
- sign_in(user1)
- group.add_owner(user1)
+ project.add_maintainer(project_maintainer)
+ project.add_owner(project_owner)
+ group.add_owner(group_owner)
+
+ sign_in(group_owner)
end
it 'shows members from project and group', :aggregate_failures do
- project.add_developer(user2)
+ project.add_developer(project_developer)
visit_members_page
- expect(first_row).to have_content(user1.name)
- expect(second_row).to have_content(user2.name)
+ expect(first_row).to have_content(group_owner.name)
+ expect(second_row).to have_content(project_developer.name)
end
it 'shows user once if member of both group and project', :aggregate_failures do
- project.add_developer(user1)
+ group.add_reporter(project_maintainer)
visit_members_page
- expect(first_row).to have_content(user1.name)
- expect(second_row).to be_blank
+ expect(first_row).to have_content(group_owner.name)
+ expect(second_row).to have_content(project_maintainer.name)
+ expect(third_row).to have_content(project_owner.name)
+ expect(all_rows[3]).to be_blank
end
- it 'update user access level' do
- project.add_developer(user2)
+ context 'update user access level' do
+ before do
+ sign_in(current_user)
+ end
+
+ context 'as maintainer' do
+ let(:current_user) { project_maintainer }
- visit_members_page
+ it 'can update a non-Owner member' do
+ project.add_developer(project_developer)
- page.within find_member_row(user2) do
- click_button('Developer')
- click_button('Reporter')
+ visit_members_page
+
+ page.within find_member_row(project_developer) do
+ click_button('Developer')
+
+ page.within '.dropdown-menu' do
+ expect(page).not_to have_button('Owner')
+ end
+
+ click_button('Reporter')
+
+ expect(page).to have_button('Reporter')
+ end
+ end
- expect(page).to have_button('Reporter')
+ it 'cannot update an Owner member' do
+ visit_members_page
+
+ page.within find_member_row(project_owner) do
+ expect(page).not_to have_button('Owner')
+ end
+ end
end
- end
- context 'when owner' do
- it 'uses ProjectMember access_level_roles for the invite members modal access option', :aggregate_failures do
- visit_members_page
+ context 'as owner' do
+ let(:current_user) { group_owner }
- click_on 'Invite members'
+ it 'can update a project Owner member' do
+ visit_members_page
- click_on 'Guest'
- wait_for_requests
+ page.within find_member_row(project_owner) do
+ click_button('Owner')
+ click_button('Reporter')
- page.within '.dropdown-menu' do
- expect(page).to have_button('Guest')
- expect(page).to have_button('Reporter')
- expect(page).to have_button('Developer')
- expect(page).to have_button('Maintainer')
- expect(page).to have_button('Owner')
+ expect(page).to have_button('Reporter')
+ end
end
end
end
- context 'when maintainer' do
- let(:maintainer) { create(:user) }
-
+ context 'uses ProjectMember valid_access_level_roles for the invite members modal options', :aggregate_failures do
before do
- project.add_maintainer(maintainer)
- sign_in(maintainer)
- end
+ sign_in(current_user)
- it 'uses ProjectMember access_level_roles for the invite members modal access option', :aggregate_failures do
visit_members_page
click_on 'Invite members'
click_on 'Guest'
wait_for_requests
+ end
- page.within '.dropdown-menu' do
- expect(page).to have_button('Guest')
- expect(page).to have_button('Reporter')
- expect(page).to have_button('Developer')
- expect(page).to have_button('Maintainer')
- expect(page).not_to have_button('Owner')
+ context 'when owner' do
+ let(:current_user) { project_owner }
+
+ it 'shows Owner in the dropdown' do
+ page.within '.dropdown-menu' do
+ expect(page).to have_button('Guest')
+ expect(page).to have_button('Reporter')
+ expect(page).to have_button('Developer')
+ expect(page).to have_button('Maintainer')
+ expect(page).to have_button('Owner')
+ end
+ end
+ end
+
+ context 'when maintainer' do
+ let(:current_user) { project_maintainer }
+
+ it 'does not show the Owner option' do
+ page.within '.dropdown-menu' do
+ expect(page).to have_button('Guest')
+ expect(page).to have_button('Reporter')
+ expect(page).to have_button('Developer')
+ expect(page).to have_button('Maintainer')
+ expect(page).not_to have_button('Owner')
+ end
end
end
end
- it 'remove user from project' do
- other_user = create(:user)
- project.add_developer(other_user)
+ describe 'remove user from project' do
+ before do
+ project.add_developer(project_developer)
- visit_members_page
+ sign_in(current_user)
- # Open modal
- page.within find_member_row(other_user) do
- click_button 'Remove member'
+ visit_members_page
end
- within_modal do
- expect(page).to have_unchecked_field 'Also unassign this user from related issues and merge requests'
- click_button('Remove member')
+ context 'when maintainer' do
+ let(:current_user) { project_maintainer }
+
+ it 'can only remove non-Owner members' do
+ page.within find_member_row(project_owner) do
+ expect(page).not_to have_button('Remove member')
+ end
+
+ # Open modal
+ page.within find_member_row(project_developer) do
+ click_button 'Remove member'
+ end
+
+ within_modal do
+ expect(page).to have_unchecked_field 'Also unassign this user from related issues and merge requests'
+ click_button('Remove member')
+ end
+
+ wait_for_requests
+
+ expect(members_table).not_to have_content(project_developer.name)
+ expect(members_table).to have_content(project_owner.name)
+ end
end
- wait_for_requests
+ context 'when owner' do
+ let(:current_user) { group_owner }
+
+ it 'can remove any direct member' do
+ page.within find_member_row(project_owner) do
+ expect(page).to have_button('Remove member')
+ end
+
+ # Open modal
+ page.within find_member_row(project_owner) do
+ click_button 'Remove member'
+ end
- expect(members_table).not_to have_content(other_user.name)
+ within_modal do
+ expect(page).to have_unchecked_field 'Also unassign this user from related issues and merge requests'
+ click_button('Remove member')
+ end
+
+ wait_for_requests
+
+ expect(members_table).not_to have_content(project_owner.name)
+ end
+ end
end
it_behaves_like 'inviting members', 'project-members-page' do
@@ -130,7 +212,7 @@ RSpec.describe 'Projects > Members > Manage members', :js do
external_project_bot = create(:user, :project_bot, name: '_external_project_bot_')
external_project = create(:project, group: external_group)
external_project.add_maintainer(external_project_bot)
- external_project.add_maintainer(user1)
+ external_project.add_maintainer(group_owner)
visit_members_page
@@ -143,8 +225,8 @@ RSpec.describe 'Projects > Members > Manage members', :js do
wait_for_requests
- expect(page).to have_content(user1.name)
- expect(page).to have_content(user2.name)
+ expect(page).to have_content(group_owner.name)
+ expect(page).to have_content(project_developer.name)
expect(page).not_to have_content(internal_project_bot.name)
expect(page).not_to have_content(external_project_bot.name)
end
@@ -155,7 +237,7 @@ RSpec.describe 'Projects > Members > Manage members', :js do
let_it_be(:project) { create(:project, :public) }
before do
- sign_out(user1)
+ sign_out(group_owner)
end
it 'does not show the Invite members button when not signed in' do
@@ -192,7 +274,7 @@ RSpec.describe 'Projects > Members > Manage members', :js do
end
it 'shows 2FA badge to user with "Maintainer" access level' do
- project.add_maintainer(user1)
+ sign_in(project_maintainer)
visit_members_page
@@ -209,7 +291,7 @@ RSpec.describe 'Projects > Members > Manage members', :js do
end
it 'does not show 2FA badge to users with access level below "Maintainer"' do
- group.add_developer(user1)
+ group.add_developer(group_owner)
visit_members_page
diff --git a/spec/features/projects/merge_request_button_spec.rb b/spec/features/projects/merge_request_button_spec.rb
index 335ae6794b7..eb52a7821f9 100644
--- a/spec/features/projects/merge_request_button_spec.rb
+++ b/spec/features/projects/merge_request_button_spec.rb
@@ -11,6 +11,8 @@ RSpec.describe 'Merge Request button' do
let(:forked_project) { fork_project(project, user, repository: true) }
shared_examples 'Merge request button only shown when allowed' do
+ let(:extra_mr_params) { {} }
+
context 'not logged in' do
it 'does not show Create merge request button' do
visit url
@@ -31,11 +33,8 @@ RSpec.describe 'Merge Request button' do
href = project_new_merge_request_path(
project,
merge_request: {
- source_project_id: project.id,
- source_branch: 'feature',
- target_project_id: project.id,
- target_branch: 'master'
- }
+ source_branch: 'feature'
+ }.merge(extra_mr_params)
)
visit url
@@ -90,11 +89,8 @@ RSpec.describe 'Merge Request button' do
href = project_new_merge_request_path(
forked_project,
merge_request: {
- source_project_id: forked_project.id,
- source_branch: 'feature',
- target_project_id: forked_project.id,
- target_branch: 'master'
- }
+ source_branch: 'feature'
+ }.merge(extra_mr_params)
)
visit fork_url
@@ -121,6 +117,7 @@ RSpec.describe 'Merge Request button' do
it_behaves_like 'Merge request button only shown when allowed' do
let(:url) { project_compare_path(project, from: 'master', to: 'feature') }
let(:fork_url) { project_compare_path(forked_project, from: 'master', to: 'feature') }
+ let(:extra_mr_params) { { target_project_id: project.id, target_branch: 'master' } }
end
it 'shows the correct merge request button when viewing across forks', :js do
@@ -128,9 +125,8 @@ RSpec.describe 'Merge Request button' do
project.add_developer(user)
href = project_new_merge_request_path(
- project,
+ forked_project,
merge_request: {
- source_project_id: forked_project.id,
source_branch: 'feature',
target_project_id: project.id,
target_branch: 'master'
diff --git a/spec/features/projects/new_project_spec.rb b/spec/features/projects/new_project_spec.rb
index 9d2d1454d77..f45025d079a 100644
--- a/spec/features/projects/new_project_spec.rb
+++ b/spec/features/projects/new_project_spec.rb
@@ -3,16 +3,44 @@
require 'spec_helper'
RSpec.describe 'New project', :js do
- include Select2Helper
include Spec::Support::Helpers::Features::TopNavSpecHelpers
context 'as a user' do
- let(:user) { create(:user) }
+ let_it_be(:user) { create(:user) }
before do
sign_in(user)
end
+ it 'shows the project description field when it should' do
+ description_label = 'Project description (optional)'
+
+ visit new_project_path
+ click_link 'Create blank project'
+
+ page.within('#blank-project-pane') do
+ expect(page).not_to have_content(description_label)
+ end
+
+ visit new_project_path
+ click_link 'Import project'
+
+ page.within('#import-project-pane') do
+ click_button 'Repository by URL'
+
+ expect(page).to have_content(description_label)
+ end
+
+ visit new_project_path
+ click_link 'Create from template'
+
+ page.within('#create-from-template-pane') do
+ find("[data-testid='use_template_#{Gitlab::ProjectTemplate.localized_templates_table.first.name}']").click
+
+ expect(page).to have_content(description_label)
+ end
+ end
+
it 'shows a message if multiple levels are restricted' do
Gitlab::CurrentSettings.update!(
restricted_visibility_levels: [Gitlab::VisibilityLevel::PRIVATE, Gitlab::VisibilityLevel::INTERNAL]
diff --git a/spec/features/projects/pages/user_adds_domain_spec.rb b/spec/features/projects/pages/user_adds_domain_spec.rb
index afa3f29ce0d..5cb4fa163c8 100644
--- a/spec/features/projects/pages/user_adds_domain_spec.rb
+++ b/spec/features/projects/pages/user_adds_domain_spec.rb
@@ -5,7 +5,7 @@ RSpec.describe 'User adds pages domain', :js do
include LetsEncryptHelpers
include Spec::Support::Helpers::ModalHelpers
- let_it_be(:project) { create(:project, pages_https_only: false) }
+ let_it_be(:project) { create(:project, :pages_published, pages_https_only: false) }
let(:user) { create(:user) }
@@ -18,8 +18,6 @@ RSpec.describe 'User adds pages domain', :js do
end
context 'when pages are exposed on external HTTP address', :http_pages_enabled do
- let(:project) { create(:project, pages_https_only: false) }
-
shared_examples 'adds new domain' do
it 'adds new domain' do
visit new_project_pages_domain_path(project)
@@ -42,7 +40,7 @@ RSpec.describe 'User adds pages domain', :js do
context 'when project in group namespace' do
it_behaves_like 'adds new domain' do
let(:group) { create :group }
- let(:project) { create(:project, namespace: group, pages_https_only: false) }
+ let(:project) { create(:project, :pages_published, namespace: group, pages_https_only: false) }
end
end
diff --git a/spec/features/projects/pages/user_configures_pages_pipeline_spec.rb b/spec/features/projects/pages/user_configures_pages_pipeline_spec.rb
new file mode 100644
index 00000000000..029479d6b95
--- /dev/null
+++ b/spec/features/projects/pages/user_configures_pages_pipeline_spec.rb
@@ -0,0 +1,59 @@
+# frozen_string_literal: true
+require 'spec_helper'
+
+RSpec.describe 'Pages edits pages settings', :js do
+ include Spec::Support::Helpers::ModalHelpers
+
+ let_it_be(:project) { create(:project, pages_https_only: false) }
+ let_it_be(:user) { create(:user) }
+
+ before do
+ allow(Gitlab.config.pages).to receive(:enabled).and_return(true)
+
+ project.add_maintainer(user)
+
+ sign_in(user)
+ end
+
+ context 'when pipeline wizard feature is enabled' do
+ before do
+ Feature.enable(:use_pipeline_wizard_for_pages)
+ end
+
+ context 'when onboarding is not complete' do
+ it 'renders onboarding instructions' do
+ visit project_pages_path(project)
+
+ expect(page).to have_content('Get started with Pages')
+ end
+ end
+
+ context 'when onboarding is complete' do
+ before do
+ project.mark_pages_onboarding_complete
+ end
+
+ it 'shows waiting screen' do
+ visit project_pages_path(project)
+
+ expect(page).to have_content('Waiting for the Pages Pipeline to complete...')
+ end
+ end
+ end
+
+ context 'when pipeline wizard feature is disabled' do
+ before do
+ Feature.disable(:use_pipeline_wizard_for_pages)
+ end
+
+ it 'shows configure pages instructions' do
+ visit project_pages_path(project)
+
+ expect(page).to have_content('Configure pages')
+ end
+
+ after do
+ Feature.enable(:use_pipeline_wizard_for_pages)
+ end
+ end
+end
diff --git a/spec/features/projects/pages/user_edits_lets_encrypt_settings_spec.rb b/spec/features/projects/pages/user_edits_lets_encrypt_settings_spec.rb
index 4c633bea64e..2e28fa20b90 100644
--- a/spec/features/projects/pages/user_edits_lets_encrypt_settings_spec.rb
+++ b/spec/features/projects/pages/user_edits_lets_encrypt_settings_spec.rb
@@ -5,7 +5,8 @@ RSpec.describe "Pages with Let's Encrypt", :https_pages_enabled do
include LetsEncryptHelpers
include Spec::Support::Helpers::ModalHelpers
- let(:project) { create(:project, pages_https_only: false) }
+ let_it_be_with_reload(:project) { create(:project, :pages_published, pages_https_only: false) }
+
let(:user) { create(:user) }
let(:role) { :maintainer }
let(:certificate_pem) { attributes_for(:pages_domain)[:certificate] }
diff --git a/spec/features/projects/pages/user_edits_settings_spec.rb b/spec/features/projects/pages/user_edits_settings_spec.rb
index bd163f4a109..88c27a6adf2 100644
--- a/spec/features/projects/pages/user_edits_settings_spec.rb
+++ b/spec/features/projects/pages/user_edits_settings_spec.rb
@@ -4,8 +4,8 @@ require 'spec_helper'
RSpec.describe 'Pages edits pages settings', :js do
include Spec::Support::Helpers::ModalHelpers
- let(:project) { create(:project, pages_https_only: false) }
- let(:user) { create(:user) }
+ let_it_be_with_reload(:project) { create(:project, :pages_published, pages_https_only: false) }
+ let_it_be(:user) { create(:user) }
before do
allow(Gitlab.config.pages).to receive(:enabled).and_return(true)
@@ -80,13 +80,6 @@ RSpec.describe 'Pages edits pages settings', :js do
end
end
- it 'does not see anything to destroy' do
- visit project_pages_path(project)
-
- expect(page).to have_content('Configure pages')
- expect(page).not_to have_link('Remove pages')
- end
-
describe 'project settings page' do
it 'renders "Pages" tab' do
visit edit_project_path(project)
@@ -151,7 +144,7 @@ RSpec.describe 'Pages edits pages settings', :js do
end
context 'non-HTTPS domain exists' do
- let(:project) { create(:project, pages_https_only: false) }
+ let(:project) { create(:project, :pages_published, pages_https_only: false) }
before do
create(:pages_domain, :without_key, :without_certificate, project: project)
diff --git a/spec/features/projects/pipeline_schedules_spec.rb b/spec/features/projects/pipeline_schedules_spec.rb
index 8cf6d5bd29b..0711a30e974 100644
--- a/spec/features/projects/pipeline_schedules_spec.rb
+++ b/spec/features/projects/pipeline_schedules_spec.rb
@@ -109,7 +109,12 @@ RSpec.describe 'Pipeline Schedules', :js do
end
it 'changes ownership of the pipeline' do
- click_link 'Take ownership'
+ click_button 'Take ownership'
+
+ page.within('#pipeline-take-ownership-modal') do
+ click_link 'Take ownership'
+ end
+
page.within('.pipeline-schedule-table-row') do
expect(page).not_to have_content('No owner')
expect(page).to have_link('Sidney Jones')
diff --git a/spec/features/projects/pipelines/legacy_pipeline_spec.rb b/spec/features/projects/pipelines/legacy_pipeline_spec.rb
index db6feecba03..14f60dfe061 100644
--- a/spec/features/projects/pipelines/legacy_pipeline_spec.rb
+++ b/spec/features/projects/pipelines/legacy_pipeline_spec.rb
@@ -385,6 +385,37 @@ RSpec.describe 'Pipeline', :js do
end
end
+ describe 'test tabs' do
+ let(:pipeline) { create(:ci_pipeline, :with_test_reports, :with_report_results, project: project) }
+
+ before do
+ stub_feature_flags(pipeline_tabs_vue: false)
+ visit_pipeline
+ wait_for_requests
+ end
+
+ context 'with test reports' do
+ it 'shows badge counter in Tests tab' do
+ expect(page.find('.js-test-report-badge-counter').text).to eq(pipeline.test_report_summary.total[:count].to_s)
+ end
+
+ it 'calls summary.json endpoint', :js do
+ find('.js-tests-tab-link').click
+
+ expect(page).to have_content('Jobs')
+ expect(page).to have_selector('[data-testid="tests-detail"]', visible: :all)
+ end
+ end
+
+ context 'without test reports' do
+ let(:pipeline) { create(:ci_pipeline, project: project) }
+
+ it 'shows zero' do
+ expect(page.find('.js-test-report-badge-counter', visible: :all).text).to eq("0")
+ end
+ end
+ end
+
context 'retrying jobs' do
before do
visit_pipeline
diff --git a/spec/features/projects/pipelines/legacy_pipelines_spec.rb b/spec/features/projects/pipelines/legacy_pipelines_spec.rb
index 15d889933bf..eb8f2de3aba 100644
--- a/spec/features/projects/pipelines/legacy_pipelines_spec.rb
+++ b/spec/features/projects/pipelines/legacy_pipelines_spec.rb
@@ -7,7 +7,7 @@ RSpec.describe 'Pipelines', :js do
include Spec::Support::Helpers::ModalHelpers
let(:project) { create(:project) }
- let(:expected_detached_mr_tag) {'merge request'}
+ let(:expected_detached_mr_tag) { 'merge request' }
context 'when user is logged in' do
let(:user) { create(:user) }
@@ -727,6 +727,7 @@ RSpec.describe 'Pipelines', :js do
end
it { expect(page).to have_content('Missing CI config file') }
+
it 'creates a pipeline after first request failed and a valid gitlab-ci.yml file ' \
'is available when trying again' do
stub_ci_pipeline_to_return_yaml_file
diff --git a/spec/features/projects/pipelines/pipeline_spec.rb b/spec/features/projects/pipelines/pipeline_spec.rb
index a83d4191f38..cfdd851cb80 100644
--- a/spec/features/projects/pipelines/pipeline_spec.rb
+++ b/spec/features/projects/pipelines/pipeline_spec.rb
@@ -394,7 +394,7 @@ RSpec.describe 'Pipeline', :js do
expect(page).to have_selector('button[aria-label="Cancel downstream pipeline"]')
end
- context 'when canceling' do
+ context 'when canceling', :sidekiq_inline do
before do
find('button[aria-label="Cancel downstream pipeline"]').click
wait_for_requests
@@ -515,18 +515,17 @@ RSpec.describe 'Pipeline', :js do
let(:pipeline) { create(:ci_pipeline, :with_test_reports, :with_report_results, project: project) }
before do
- stub_feature_flags(pipeline_tabs_vue: false)
visit_pipeline
wait_for_requests
end
context 'with test reports' do
it 'shows badge counter in Tests tab' do
- expect(page.find('.js-test-report-badge-counter').text).to eq(pipeline.test_report_summary.total[:count].to_s)
+ expect(page.find('[data-testid="tests-counter"]').text).to eq(pipeline.test_report_summary.total[:count].to_s)
end
it 'calls summary.json endpoint', :js do
- find('.js-tests-tab-link').click
+ find('.gl-tab-nav-item', text: 'Tests').click
expect(page).to have_content('Jobs')
expect(page).to have_selector('[data-testid="tests-detail"]', visible: :all)
@@ -537,7 +536,7 @@ RSpec.describe 'Pipeline', :js do
let(:pipeline) { create(:ci_pipeline, project: project) }
it 'shows zero' do
- expect(page.find('.js-test-report-badge-counter', visible: :all).text).to eq("0")
+ expect(page.find('[data-testid="tests-counter"]', visible: :all).text).to eq("0")
end
end
end
diff --git a/spec/features/projects/pipelines/pipelines_spec.rb b/spec/features/projects/pipelines/pipelines_spec.rb
index 785edc69623..bf521971ae0 100644
--- a/spec/features/projects/pipelines/pipelines_spec.rb
+++ b/spec/features/projects/pipelines/pipelines_spec.rb
@@ -7,7 +7,7 @@ RSpec.describe 'Pipelines', :js do
include Spec::Support::Helpers::ModalHelpers
let(:project) { create(:project) }
- let(:expected_detached_mr_tag) {'merge request'}
+ let(:expected_detached_mr_tag) { 'merge request' }
context 'when user is logged in' do
let(:user) { create(:user) }
@@ -710,6 +710,7 @@ RSpec.describe 'Pipelines', :js do
end
it { expect(page).to have_content('Missing CI config file') }
+
it 'creates a pipeline after first request failed and a valid gitlab-ci.yml file is available when trying again' do
stub_ci_pipeline_to_return_yaml_file
diff --git a/spec/features/projects/settings/registry_settings_cleanup_tags_spec.rb b/spec/features/projects/settings/registry_settings_cleanup_tags_spec.rb
new file mode 100644
index 00000000000..5a50b3de772
--- /dev/null
+++ b/spec/features/projects/settings/registry_settings_cleanup_tags_spec.rb
@@ -0,0 +1,49 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Project > Settings > Packages & Registries > Container registry tag expiration policy' do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project, reload: true) { create(:project, namespace: user.namespace) }
+
+ let(:container_registry_enabled) { true }
+ let(:container_registry_enabled_on_project) { ProjectFeature::ENABLED }
+
+ subject { visit cleanup_image_tags_project_settings_packages_and_registries_path(project) }
+
+ before do
+ project.project_feature.update!(container_registry_access_level: container_registry_enabled_on_project)
+ project.container_expiration_policy.update!(enabled: true)
+
+ sign_in(user)
+ stub_container_registry_config(enabled: container_registry_enabled)
+ end
+
+ context 'as owner', :js do
+ it 'shows available section' do
+ subject
+
+ expect(find('.breadcrumbs')).to have_content('Clean up image tags')
+ end
+ end
+
+ context 'when registry is disabled' do
+ let(:container_registry_enabled) { false }
+
+ it 'does not exist' do
+ subject
+
+ expect(page).to have_gitlab_http_status(:not_found)
+ end
+ end
+
+ context 'when container registry is disabled on project' do
+ let(:container_registry_enabled_on_project) { ProjectFeature::DISABLED }
+
+ it 'does not exist' do
+ subject
+
+ expect(page).to have_gitlab_http_status(:not_found)
+ end
+ end
+end
diff --git a/spec/features/projects/settings/registry_settings_spec.rb b/spec/features/projects/settings/registry_settings_spec.rb
index 9468540736f..1fb46c669e7 100644
--- a/spec/features/projects/settings/registry_settings_spec.rb
+++ b/spec/features/projects/settings/registry_settings_spec.rb
@@ -31,7 +31,6 @@ RSpec.describe 'Project > Settings > Packages & Registries > Container registry
subject
within '[data-testid="container-expiration-policy-project-settings"]' do
- click_button('Expand')
select('Every day', from: 'Run cleanup')
select('50 tags per image name', from: 'Keep the most recent:')
fill_in('Keep tags matching:', with: 'stable')
@@ -50,7 +49,6 @@ RSpec.describe 'Project > Settings > Packages & Registries > Container registry
subject
within '[data-testid="container-expiration-policy-project-settings"]' do
- click_button('Expand')
fill_in('Remove tags matching:', with: '*-production')
submit_button = find('[data-testid="save-button"')
@@ -76,7 +74,6 @@ RSpec.describe 'Project > Settings > Packages & Registries > Container registry
subject
within '[data-testid="container-expiration-policy-project-settings"]' do
- click_button('Expand')
expect(find('[data-testid="enable-toggle"]')).to have_content('Disabled - Tags will not be automatically deleted.')
end
end
@@ -91,7 +88,6 @@ RSpec.describe 'Project > Settings > Packages & Registries > Container registry
subject
within '[data-testid="container-expiration-policy-project-settings"]' do
- click_button('Expand')
expect(find('.gl-alert-title')).to have_content('Cleanup policy for tags is disabled')
end
end
diff --git a/spec/features/projects/settings/service_desk_setting_spec.rb b/spec/features/projects/settings/service_desk_setting_spec.rb
index 0df4bd3f0d9..86c5c3d2d8c 100644
--- a/spec/features/projects/settings/service_desk_setting_spec.rb
+++ b/spec/features/projects/settings/service_desk_setting_spec.rb
@@ -81,7 +81,7 @@ RSpec.describe 'Service Desk Setting', :js, :clean_gitlab_redis_cache do
}
end
- let_it_be_with_reload(:group) { create(:group)}
+ let_it_be_with_reload(:group) { create(:group) }
let_it_be_with_reload(:project) { create(:project, :custom_repo, group: group, files: issuable_project_template_files) }
let_it_be(:group_template_repo) { create(:project, :custom_repo, group: group, files: issuable_group_template_files) }
diff --git a/spec/features/projects/tags/user_edits_tags_spec.rb b/spec/features/projects/tags/user_edits_tags_spec.rb
index c8438b73dc3..857d0696659 100644
--- a/spec/features/projects/tags/user_edits_tags_spec.rb
+++ b/spec/features/projects/tags/user_edits_tags_spec.rb
@@ -15,6 +15,13 @@ RSpec.describe 'Project > Tags', :js do
end
shared_examples "can create and update release" do
+ it 'shows tag information' do
+ visit page_url
+
+ expect(page).to have_content 'v1.1.0'
+ expect(page).to have_content 'Version 1.1.0'
+ end
+
it 'can create new release' do
visit page_url
page.find("a[href=\"#{new_project_release_path(project, tag_name: 'v1.1.0')}\"]").click
@@ -52,71 +59,4 @@ RSpec.describe 'Project > Tags', :js do
include_examples "can create and update release"
end
-
- # TODO: remove most of these together with FF https://gitlab.com/gitlab-org/gitlab/-/issues/366244
- describe 'when opening project tags' do
- before do
- stub_feature_flags(edit_tag_release_notes_via_release_page: false)
- visit project_tags_path(project)
- end
-
- context 'page with tags list' do
- it 'shows tag name' do
- expect(page).to have_content 'v1.1.0'
- expect(page).to have_content 'Version 1.1.0'
- end
-
- it 'shows tag edit button' do
- page.within '.tags > .content-list' do
- edit_btn = page.find("li > .row-fixed-content.controls a.btn-edit[href='/#{project.full_path}/-/tags/v1.1.0/release/edit']")
-
- expect(edit_btn['href']).to end_with("/#{project.full_path}/-/tags/v1.1.0/release/edit")
- end
- end
- end
-
- context 'edit tag release notes' do
- before do
- page.find("li > .row-fixed-content.controls a.btn-edit[href='/#{project.full_path}/-/tags/v1.1.0/release/edit']").click
- end
-
- it 'shows tag name header' do
- page.within('.content') do
- expect(page.find('.sub-header-block')).to have_content 'Release notes for tag v1.1.0'
- end
- end
-
- it 'shows release notes form' do
- page.within('.content') do
- expect(page).to have_selector('form.release-form')
- end
- end
-
- it 'toolbar buttons on release notes form are functional' do
- page.within('.content form.release-form') do
- note_textarea = page.find('.js-gfm-input')
-
- # Click on Bold button
- page.find('.md-header-toolbar button:first-child').click
-
- expect(note_textarea.value).to eq('****')
- end
- end
-
- it 'release notes form shows "Attach a file" button', :js do
- page.within('.content form.release-form') do
- expect(page).to have_button('Attach a file')
- expect(page).not_to have_selector('.uploading-progress-container', visible: true)
- end
- end
-
- it 'shows "Attaching a file" message on uploading 1 file', :js, :capybara_ignore_server_errors do
- slow_requests do
- dropzone_file([Rails.root.join('spec', 'fixtures', 'dk.png')], 0, false)
-
- expect(page).to have_selector('.attaching-file-message', visible: true, text: 'Attaching a file -')
- end
- end
- end
- end
end
diff --git a/spec/features/projects/tags/user_views_tag_spec.rb b/spec/features/projects/tags/user_views_tag_spec.rb
new file mode 100644
index 00000000000..3978c5b7b78
--- /dev/null
+++ b/spec/features/projects/tags/user_views_tag_spec.rb
@@ -0,0 +1,8 @@
+# frozen_string_literal: true
+require 'spec_helper'
+
+RSpec.describe 'User views tag', :feature do
+ include_examples 'user views tag' do
+ let(:tag_page) { project_tag_path(project, id: tag_name) }
+ end
+end
diff --git a/spec/features/projects/tags/user_views_tags_spec.rb b/spec/features/projects/tags/user_views_tags_spec.rb
index dfb5d5d9221..d3849df023e 100644
--- a/spec/features/projects/tags/user_views_tags_spec.rb
+++ b/spec/features/projects/tags/user_views_tags_spec.rb
@@ -2,34 +2,8 @@
require 'spec_helper'
RSpec.describe 'User views tags', :feature do
- context 'with html' do
- let(:project) { create(:project, :repository, visibility_level: Gitlab::VisibilityLevel::PUBLIC) }
- let(:user) { create(:user) }
- let(:tag_name) { "stable" }
- let!(:release) { create(:release, project: project, tag: tag_name) }
-
- before do
- project.add_developer(user)
- project.repository.add_tag(user, tag_name, project.default_branch_or_main)
-
- sign_in(user)
- end
-
- shared_examples 'renders the tag index page' do
- it do
- visit project_tags_path(project)
-
- expect(page).to have_content tag_name
- end
- end
-
- it_behaves_like 'renders the tag index page'
-
- context 'when tag name contains a slash' do
- let(:tag_name) { "stable/v0.1" }
-
- it_behaves_like 'renders the tag index page'
- end
+ include_examples 'user views tag' do
+ let(:tag_page) { project_tags_path(project) }
end
context 'rss' do
diff --git a/spec/features/projects/tree/create_directory_spec.rb b/spec/features/projects/tree/create_directory_spec.rb
index f6127b38bd6..074469a9b55 100644
--- a/spec/features/projects/tree/create_directory_spec.rb
+++ b/spec/features/projects/tree/create_directory_spec.rb
@@ -49,8 +49,8 @@ RSpec.describe 'Multi-file editor new directory', :js do
# Compact mode depends on the size of window. If it is shorter than MAX_WINDOW_HEIGHT_COMPACT,
# (as it is with WEBDRIVER_HEADLESS=0), this initial commit button will exist. Otherwise, if it is
# taller (as it is by default with chrome headless) then the button will not exist.
- if page.has_css?('.qa-begin-commit-button') # rubocop:disable QA/SelectorUsage
- find('.qa-begin-commit-button').click # rubocop:disable QA/SelectorUsage
+ if page.has_css?('[data-testid="begin-commit-button"]')
+ find('[data-testid="begin-commit-button"]').click
end
fill_in('commit-message', with: 'commit message ide')
diff --git a/spec/features/projects/tree/create_file_spec.rb b/spec/features/projects/tree/create_file_spec.rb
index 33be02a9121..85c644fa528 100644
--- a/spec/features/projects/tree/create_file_spec.rb
+++ b/spec/features/projects/tree/create_file_spec.rb
@@ -39,8 +39,8 @@ RSpec.describe 'Multi-file editor new file', :js do
# Compact mode depends on the size of window. If it is shorter than MAX_WINDOW_HEIGHT_COMPACT,
# (as it is with WEBDRIVER_HEADLESS=0), this initial commit button will exist. Otherwise, if it is
# taller (as it is by default with chrome headless) then the button will not exist.
- if page.has_css?('.qa-begin-commit-button') # rubocop:disable QA/SelectorUsage
- find('.qa-begin-commit-button').click # rubocop:disable QA/SelectorUsage
+ if page.has_css?('[data-testid="begin-commit-button"]')
+ find('[data-testid="begin-commit-button"]').click
end
fill_in('commit-message', with: 'commit message ide')
diff --git a/spec/features/projects/tree/tree_show_spec.rb b/spec/features/projects/tree/tree_show_spec.rb
index 53e89cd2959..163e347d03d 100644
--- a/spec/features/projects/tree/tree_show_spec.rb
+++ b/spec/features/projects/tree/tree_show_spec.rb
@@ -54,7 +54,7 @@ RSpec.describe 'Projects tree', :js do
let(:filename) { File.join(path, 'test.txt') }
let(:newrev) { project.repository.commit('master').sha }
let(:short_newrev) { project.repository.commit('master').short_id }
- let(:message) { 'Glob characters'}
+ let(:message) { 'Glob characters' }
before do
create_file_in_repo(project, 'master', 'master', filename, 'Test file', commit_message: message)
diff --git a/spec/features/projects_spec.rb b/spec/features/projects_spec.rb
index f6f9c7f0d3c..d228fb084c3 100644
--- a/spec/features/projects_spec.rb
+++ b/spec/features/projects_spec.rb
@@ -329,7 +329,7 @@ RSpec.describe 'Project' do
it 'has working links to submodules' do
click_link('645f6c4c')
- expect(page).to have_selector('.qa-branches-select', text: '645f6c4c82fd3f5e06f67134450a570b795e55a6') # rubocop:disable QA/SelectorUsage
+ expect(page).to have_selector('[data-testid="branches-select"]', text: '645f6c4c82fd3f5e06f67134450a570b795e55a6')
end
context 'for signed commit on default branch', :js do
@@ -454,8 +454,8 @@ RSpec.describe 'Project' do
let_it_be(:storage_enforcement_date) { Date.today + 30 }
before do
- allow_next_found_instance_of(Group) do |grp|
- allow(grp).to receive(:storage_enforcement_date).and_return(storage_enforcement_date)
+ allow_next_found_instance_of(Group) do |group|
+ allow(group).to receive(:storage_enforcement_date).and_return(storage_enforcement_date)
end
end
@@ -478,8 +478,8 @@ RSpec.describe 'Project' do
let_it_be(:project) { create(:project, namespace: user.namespace) }
before do
- allow_next_found_instance_of(Namespaces::UserNamespace) do |namspace|
- allow(namspace).to receive(:storage_enforcement_date).and_return(storage_enforcement_date)
+ allow_next_found_instance_of(Namespaces::UserNamespace) do |user_namespace|
+ allow(user_namespace).to receive(:storage_enforcement_date).and_return(storage_enforcement_date)
end
end
@@ -490,8 +490,8 @@ RSpec.describe 'Project' do
end
it 'does not display the banner in a paid group project page' do
- allow_next_found_instance_of(Group) do |grp|
- allow(grp).to receive(:paid?).and_return(true)
+ allow_next_found_instance_of(Group) do |group|
+ allow(group).to receive(:paid?).and_return(true)
end
visit project_path(project)
expect_page_not_to_have_storage_enforcement_banner
@@ -506,8 +506,8 @@ RSpec.describe 'Project' do
expect_page_not_to_have_storage_enforcement_banner
storage_enforcement_date = Date.today + 13
- allow_next_found_instance_of(Group) do |grp|
- allow(grp).to receive(:storage_enforcement_date).and_return(storage_enforcement_date)
+ allow_next_found_instance_of(Group) do |group|
+ allow(group).to receive(:storage_enforcement_date).and_return(storage_enforcement_date)
end
page.refresh
expect_page_to_have_storage_enforcement_banner(storage_enforcement_date)
@@ -515,8 +515,12 @@ RSpec.describe 'Project' do
end
context 'with storage_enforcement_date not set' do
- # This test should break and be rewritten after the implementation of the storage_enforcement_date
- # TBD: https://gitlab.com/gitlab-org/gitlab/-/issues/350632
+ before do
+ allow_next_found_instance_of(Group) do |group|
+ allow(group).to receive(:storage_enforcement_date).and_return(nil)
+ end
+ end
+
it 'does not display the banner in the group page' do
stub_feature_flags(namespace_storage_limit_bypass_date_check: false)
visit project_path(project)
@@ -526,11 +530,11 @@ RSpec.describe 'Project' do
end
def expect_page_to_have_storage_enforcement_banner(storage_enforcement_date)
- expect(page).to have_text "From #{storage_enforcement_date} storage limits will apply to this namespace"
+ expect(page).to have_text "Effective #{storage_enforcement_date}, namespace storage limits will apply"
end
def expect_page_not_to_have_storage_enforcement_banner
- expect(page).not_to have_text "storage limits will apply to this namespace"
+ expect(page).not_to have_text "namespace storage limits will apply"
end
def remove_with_confirm(button_text, confirm_with, confirm_button_text = 'Confirm')
diff --git a/spec/features/runners_spec.rb b/spec/features/runners_spec.rb
index 534da71e39a..2600c00346e 100644
--- a/spec/features/runners_spec.rb
+++ b/spec/features/runners_spec.rb
@@ -268,7 +268,7 @@ RSpec.describe 'Runners' do
it 'group runners are not available' do
visit project_runners_path(project)
- expect(page).not_to have_content 'Group owners can register group runners in the group\'s CI/CD settings.'
+ expect(page).not_to have_content 'To register them, go to the group\'s Runners page.'
expect(page).to have_content 'Ask your group owner to set up a group runner'
end
end
@@ -287,7 +287,7 @@ RSpec.describe 'Runners' do
expect(page).to have_content 'This group does not have any group runners yet.'
- expect(page).to have_content 'Group owners can register group runners in the group\'s CI/CD settings.'
+ expect(page).to have_content 'To register them, go to the group\'s Runners page.'
expect(page).not_to have_content 'Ask your group owner to set up a group runner'
end
end
@@ -313,7 +313,7 @@ RSpec.describe 'Runners' do
expect(page).to have_content 'This group does not have any group runners yet.'
- expect(page).not_to have_content 'Group owners can register group runners in the group\'s CI/CD settings.'
+ expect(page).not_to have_content 'To register them, go to the group\'s Runners page.'
expect(page).to have_content 'Ask your group owner to set up a group runner.'
end
end
diff --git a/spec/features/search/user_searches_for_commits_spec.rb b/spec/features/search/user_searches_for_commits_spec.rb
index 279db686aa9..2dceda09d7c 100644
--- a/spec/features/search/user_searches_for_commits_spec.rb
+++ b/spec/features/search/user_searches_for_commits_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
RSpec.describe 'User searches for commits', :js do
+ include CycleAnalyticsHelpers
+
let(:project) { create(:project, :repository) }
let(:sha) { '6d394385cf567f80a8fd85055db1ab4c5295806f' }
let(:user) { create(:user) }
diff --git a/spec/features/signed_commits_spec.rb b/spec/features/signed_commits_spec.rb
index dbf35567803..8725dbcafe8 100644
--- a/spec/features/signed_commits_spec.rb
+++ b/spec/features/signed_commits_spec.rb
@@ -93,7 +93,7 @@ RSpec.describe 'GPG signed commits' do
page.find('.gpg-status-box', text: 'Unverified').click
within '.popover' do
- expect(page).to have_content 'This commit was signed with a verified signature, but the committer email is not verified to belong to the same user.'
+ expect(page).to have_content 'This commit was signed with a verified signature, but the committer email is not associated with the GPG Key.'
expect(page).to have_content 'Bette Cartwright'
expect(page).to have_content '@bette.cartwright'
expect(page).to have_content "GPG Key ID: #{GpgHelpers::User2.primary_keyid}"
diff --git a/spec/features/tags/developer_updates_tag_spec.rb b/spec/features/tags/developer_updates_tag_spec.rb
deleted file mode 100644
index 531ed91c057..00000000000
--- a/spec/features/tags/developer_updates_tag_spec.rb
+++ /dev/null
@@ -1,56 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-# TODO: remove this file together with FF https://gitlab.com/gitlab-org/gitlab/-/issues/366244
-RSpec.describe 'Developer updates tag' do
- let(:user) { create(:user) }
- let(:group) { create(:group) }
- let(:project) { create(:project, :repository, namespace: group) }
-
- before do
- project.add_developer(user)
- sign_in(user)
- stub_feature_flags(edit_tag_release_notes_via_release_page: false)
- visit project_tags_path(project)
- end
-
- context 'from the tags list page' do
- it 'updates the release notes' do
- find("li > .row-fixed-content.controls a.btn-edit[href='/#{project.full_path}/-/tags/v1.1.0/release/edit']").click
-
- fill_in 'release_description', with: 'Awesome release notes'
- click_button 'Save changes'
-
- expect(page).to have_current_path(
- project_tag_path(project, 'v1.1.0'), ignore_query: true)
- expect(page).to have_content 'v1.1.0'
- expect(page).to have_content 'Awesome release notes'
- end
-
- it 'description has emoji autocomplete', :js do
- page.within(first('.content-list .controls')) do
- click_link 'Edit release notes'
- end
-
- find('#release_description').native.send_keys('')
- fill_in 'release_description', with: ':'
-
- expect(page).to have_selector('.atwho-view')
- end
- end
-
- context 'from a specific tag page' do
- it 'updates the release notes' do
- click_on 'v1.1.0'
- click_link 'Edit release notes'
- fill_in 'release_description', with: 'Awesome release notes'
- click_button 'Save changes'
-
- expect(page).to have_current_path(
- project_tag_path(project, 'v1.1.0'), ignore_query: true)
- expect(page).to have_content 'v1.1.0'
- expect(page).to have_content 'Awesome release notes'
- end
- end
-end
diff --git a/spec/features/task_lists_spec.rb b/spec/features/task_lists_spec.rb
index 6907701de9c..07de3789c08 100644
--- a/spec/features/task_lists_spec.rb
+++ b/spec/features/task_lists_spec.rb
@@ -89,7 +89,7 @@ RSpec.describe 'Task Lists', :js do
it 'provides a summary on Issues#index' do
visit project_issues_path(project)
- expect(page).to have_content("2 of 6 tasks completed")
+ expect(page).to have_content("2 of 6 checklist items completed")
end
end
@@ -108,7 +108,7 @@ RSpec.describe 'Task Lists', :js do
it 'provides a summary on Issues#index' do
visit project_issues_path(project)
- expect(page).to have_content("0 of 1 task completed")
+ expect(page).to have_content("0 of 1 checklist item completed")
end
end
@@ -127,7 +127,7 @@ RSpec.describe 'Task Lists', :js do
it 'provides a summary on Issues#index' do
visit project_issues_path(project)
- expect(page).to have_content("1 of 1 task completed")
+ expect(page).to have_content("1 of 1 checklist item completed")
end
end
end
@@ -253,7 +253,7 @@ RSpec.describe 'Task Lists', :js do
it 'provides a summary on MergeRequests#index' do
visit project_merge_requests_path(project)
- expect(page).to have_content("2 of 6 tasks completed")
+ expect(page).to have_content("2 of 6 checklist items completed")
end
end
@@ -278,7 +278,7 @@ RSpec.describe 'Task Lists', :js do
it 'provides a summary on MergeRequests#index' do
visit project_merge_requests_path(project)
- expect(page).to have_content("0 of 1 task completed")
+ expect(page).to have_content("0 of 1 checklist item completed")
end
end
@@ -297,7 +297,7 @@ RSpec.describe 'Task Lists', :js do
it 'provides a summary on MergeRequests#index' do
visit project_merge_requests_path(project)
- expect(page).to have_content("1 of 1 task completed")
+ expect(page).to have_content("1 of 1 checklist item completed")
end
end
end
diff --git a/spec/features/uploads/user_uploads_avatar_to_profile_spec.rb b/spec/features/uploads/user_uploads_avatar_to_profile_spec.rb
index 900cd72c17f..cbd2d30d726 100644
--- a/spec/features/uploads/user_uploads_avatar_to_profile_spec.rb
+++ b/spec/features/uploads/user_uploads_avatar_to_profile_spec.rb
@@ -17,7 +17,7 @@ RSpec.describe 'User uploads avatar to profile' do
visit user_path(user)
- expect(page).to have_selector(%Q(img[data-src$="/uploads/-/system/user/avatar/#{user.id}/dk.png?width=90"]))
+ expect(page).to have_selector(%Q(img[src$="/uploads/-/system/user/avatar/#{user.id}/dk.png?width=96"]))
# Cheating here to verify something that isn't user-facing, but is important
expect(user.reload.avatar.file).to exist
diff --git a/spec/features/uploads/user_uploads_file_to_note_spec.rb b/spec/features/uploads/user_uploads_file_to_note_spec.rb
index 589cc9f9b02..2547e2d274c 100644
--- a/spec/features/uploads/user_uploads_file_to_note_spec.rb
+++ b/spec/features/uploads/user_uploads_file_to_note_spec.rb
@@ -16,8 +16,8 @@ RSpec.describe 'User uploads file to note' do
end
context 'before uploading' do
- it 'shows "Attach a file" button', :js do
- expect(page).to have_button('Attach a file')
+ it 'shows "Attach a file or image" button', :js do
+ expect(page).to have_selector('[data-testid="button-attach-file"]')
expect(page).not_to have_selector('.uploading-progress-container', visible: true)
end
end
@@ -30,7 +30,7 @@ RSpec.describe 'User uploads file to note' do
click_button 'Cancel'
end
- expect(page).to have_button('Attach a file')
+ expect(page).to have_selector('[data-testid="button-attach-file"]')
expect(page).not_to have_button('Cancel')
expect(page).not_to have_selector('.uploading-progress-container', visible: true)
end
@@ -60,16 +60,15 @@ RSpec.describe 'User uploads file to note' do
expect(page).to have_selector('.uploading-error-message', visible: true, text: error_text)
expect(page).to have_button('Try again', visible: true)
expect(page).to have_button('attach a new file', visible: true)
- expect(page).not_to have_button('Attach a file')
end
end
context 'uploading is complete' do
- it 'shows "Attach a file" button on uploading complete', :js do
+ it 'shows "Attach a file or image" button on uploading complete', :js do
dropzone_file([Rails.root.join('spec', 'fixtures', 'dk.png')])
wait_for_requests
- expect(page).to have_button('Attach a file')
+ expect(page).to have_selector('[data-testid="button-attach-file"]')
expect(page).not_to have_selector('.uploading-progress-container', visible: true)
end
diff --git a/spec/features/users/email_verification_on_login_spec.rb b/spec/features/users/email_verification_on_login_spec.rb
index 0833f7f6f8e..c8301c2fc91 100644
--- a/spec/features/users/email_verification_on_login_spec.rb
+++ b/spec/features/users/email_verification_on_login_spec.rb
@@ -335,7 +335,7 @@ RSpec.describe 'Email Verification On Login', :clean_gitlab_redis_rate_limiting
mail = find_email_for(user)
expect(mail.to).to match_array([user.email])
expect(mail.subject).to eq('Verify your identity')
- code = mail.body.parts.first.to_s[/\d{#{VerifiesWithEmail::TOKEN_LENGTH}}/]
+ code = mail.body.parts.first.to_s[/\d{#{VerifiesWithEmail::TOKEN_LENGTH}}/o]
reset_delivered_emails!
code
end
diff --git a/spec/features/users/login_spec.rb b/spec/features/users/login_spec.rb
index 3ba3650b608..b875dbe1340 100644
--- a/spec/features/users/login_spec.rb
+++ b/spec/features/users/login_spec.rb
@@ -49,15 +49,15 @@ RSpec.describe 'Login', :clean_gitlab_redis_sessions do
expect(page).to have_current_path edit_user_password_path, ignore_query: true
expect(page).to have_content('Please create a password for your new account.')
- fill_in 'user_password', with: 'password'
- fill_in 'user_password_confirmation', with: 'password'
+ fill_in 'user_password', with: user.password
+ fill_in 'user_password_confirmation', with: user.password
click_button 'Change your password'
expect(page).to have_current_path new_user_session_path, ignore_query: true
expect(page).to have_content(I18n.t('devise.passwords.updated_not_active'))
fill_in 'user_login', with: user.username
- fill_in 'user_password', with: 'password'
+ fill_in 'user_password', with: user.password
click_button 'Sign in'
expect_single_session_with_authenticated_ttl
@@ -216,7 +216,7 @@ RSpec.describe 'Login', :clean_gitlab_redis_sessions do
before do
gitlab_sign_in(user, remember: true)
- expect(page).to have_content('Two-Factor Authentication')
+ expect(page).to have_content('Two-factor authentication code')
end
it 'does not show a "You are already signed in." error message' do
@@ -231,7 +231,7 @@ RSpec.describe 'Login', :clean_gitlab_redis_sessions do
end
it 'does not allow sign-in if the user password is updated before entering a one-time code' do
- user.update!(password: 'new_password')
+ user.update!(password: User.random_password)
enter_code(user.current_otp)
@@ -365,7 +365,7 @@ RSpec.describe 'Login', :clean_gitlab_redis_sessions do
end
context 'when logging in via OAuth' do
- let(:user) { create(:omniauth_user, :two_factor, extern_uid: 'my-uid', provider: 'saml')}
+ let(:user) { create(:omniauth_user, :two_factor, extern_uid: 'my-uid', provider: 'saml') }
let(:mock_saml_response) do
File.read('spec/fixtures/authentication/saml_response.xml')
end
@@ -407,7 +407,7 @@ RSpec.describe 'Login', :clean_gitlab_redis_sessions do
sign_in_using_saml!
- expect(page).to have_content('Two-Factor Authentication')
+ expect(page).to have_content('Two-factor authentication code')
enter_code(user.current_otp)
@@ -468,7 +468,7 @@ RSpec.describe 'Login', :clean_gitlab_redis_sessions do
visit new_user_session_path
fill_in 'user_login', with: user.email
- fill_in 'user_password', with: '12345678'
+ fill_in 'user_password', with: user.password
click_button 'Sign in'
expect(page).to have_current_path(new_profile_password_path, ignore_query: true)
@@ -477,14 +477,14 @@ RSpec.describe 'Login', :clean_gitlab_redis_sessions do
end
context 'with invalid username and password' do
- let(:user) { create(:user, password: 'not-the-default') }
+ let(:user) { create(:user) }
it 'blocks invalid login' do
expect(authentication_metrics)
.to increment(:user_unauthenticated_counter)
.and increment(:user_password_invalid_counter)
- gitlab_sign_in(user)
+ gitlab_sign_in(user, password: 'incorrect-password')
expect_single_session_with_short_ttl
expect(page).to have_content('Invalid login or password.')
@@ -788,7 +788,7 @@ RSpec.describe 'Login', :clean_gitlab_redis_sessions do
visit new_user_session_path
fill_in 'user_login', with: user.email
- fill_in 'user_password', with: '12345678'
+ fill_in 'user_password', with: user.password
click_button 'Sign in'
@@ -809,7 +809,7 @@ RSpec.describe 'Login', :clean_gitlab_redis_sessions do
visit new_user_session_path
fill_in 'user_login', with: user.email
- fill_in 'user_password', with: '12345678'
+ fill_in 'user_password', with: user.password
click_button 'Sign in'
@@ -830,7 +830,7 @@ RSpec.describe 'Login', :clean_gitlab_redis_sessions do
visit new_user_session_path
fill_in 'user_login', with: user.email
- fill_in 'user_password', with: '12345678'
+ fill_in 'user_password', with: user.password
click_button 'Sign in'
@@ -873,7 +873,7 @@ RSpec.describe 'Login', :clean_gitlab_redis_sessions do
visit new_user_session_path
fill_in 'user_login', with: user.email
- fill_in 'user_password', with: '12345678'
+ fill_in 'user_password', with: user.password
click_button 'Sign in'
fill_in 'user_otp_attempt', with: user.reload.current_otp
@@ -899,7 +899,7 @@ RSpec.describe 'Login', :clean_gitlab_redis_sessions do
visit new_user_session_path
fill_in 'user_login', with: user.email
- fill_in 'user_password', with: '12345678'
+ fill_in 'user_password', with: user.password
click_button 'Sign in'
expect_to_be_on_terms_page
@@ -907,9 +907,11 @@ RSpec.describe 'Login', :clean_gitlab_redis_sessions do
expect(page).to have_current_path(new_profile_password_path, ignore_query: true)
- fill_in 'user_password', with: '12345678'
- fill_in 'user_new_password', with: 'new password'
- fill_in 'user_password_confirmation', with: 'new password'
+ new_password = User.random_password
+
+ fill_in 'user_password', with: user.password
+ fill_in 'user_new_password', with: new_password
+ fill_in 'user_password_confirmation', with: new_password
click_button 'Set new password'
expect(page).to have_content('Password successfully changed')
diff --git a/spec/features/users/show_spec.rb b/spec/features/users/show_spec.rb
index 2a444dad486..068e1fd4243 100644
--- a/spec/features/users/show_spec.rb
+++ b/spec/features/users/show_spec.rb
@@ -132,10 +132,10 @@ RSpec.describe 'User page' do
let_it_be(:followee) { create(:user) }
let_it_be(:follower) { create(:user) }
- it 'does not show link to follow' do
+ it 'does not show button to follow' do
subject
- expect(page).not_to have_link(text: 'Follow', class: 'gl-button')
+ expect(page).not_to have_button(text: 'Follow', class: 'gl-button')
end
it 'shows 0 followers and 0 following' do
@@ -155,11 +155,11 @@ RSpec.describe 'User page' do
expect(page).to have_content('1 following')
end
- it 'does show link to follow' do
+ it 'does show button to follow' do
sign_in(user)
visit user_path(followee)
- expect(page).to have_link(text: 'Follow', class: 'gl-button')
+ expect(page).to have_button(text: 'Follow', class: 'gl-button')
end
it 'does show link to unfollow' do
@@ -168,7 +168,7 @@ RSpec.describe 'User page' do
visit user_path(followee)
- expect(page).to have_link(text: 'Unfollow', class: 'gl-button')
+ expect(page).to have_button(text: 'Unfollow', class: 'gl-button')
end
end
end
diff --git a/spec/features/users/signup_spec.rb b/spec/features/users/signup_spec.rb
index 30441dac7b6..f2381e41de8 100644
--- a/spec/features/users/signup_spec.rb
+++ b/spec/features/users/signup_spec.rb
@@ -3,39 +3,43 @@
require 'spec_helper'
RSpec.shared_examples 'Signup name validation' do |field, max_length, label|
- before do
- visit new_user_registration_path
- end
+ flag_values = [true, false]
+ flag_values.each do |val|
+ before do
+ stub_feature_flags(restyle_login_page: val)
+ visit new_user_registration_path
+ end
- describe "#{field} validation", :js do
- it "does not show an error border if the user's fullname length is not longer than #{max_length} characters" do
- fill_in field, with: 'u' * max_length
+ describe "#{field} validation", :js do
+ it "does not show an error border if the user's fullname length is not longer than #{max_length} characters" do
+ fill_in field, with: 'u' * max_length
- expect(find('.name')).not_to have_css '.gl-field-error-outline'
- end
+ expect(find('.name')).not_to have_css '.gl-field-error-outline'
+ end
- it 'shows an error border if the user\'s fullname contains an emoji' do
- simulate_input("##{field}", 'Ehsan 🦋')
+ it 'shows an error border if the user\'s fullname contains an emoji' do
+ simulate_input("##{field}", 'Ehsan 🦋')
- expect(find('.name')).to have_css '.gl-field-error-outline'
- end
+ expect(find('.name')).to have_css '.gl-field-error-outline'
+ end
- it "shows an error border if the user\'s fullname is longer than #{max_length} characters" do
- fill_in field, with: 'n' * (max_length + 1)
+ it "shows an error border if the user\'s fullname is longer than #{max_length} characters" do
+ fill_in field, with: 'n' * (max_length + 1)
- expect(find('.name')).to have_css '.gl-field-error-outline'
- end
+ expect(find('.name')).to have_css '.gl-field-error-outline'
+ end
- it "shows an error message if the user\'s #{label} is longer than #{max_length} characters" do
- fill_in field, with: 'n' * (max_length + 1)
+ it "shows an error message if the user\'s #{label} is longer than #{max_length} characters" do
+ fill_in field, with: 'n' * (max_length + 1)
- expect(page).to have_content("#{label} is too long (maximum is #{max_length} characters).")
- end
+ expect(page).to have_content("#{label} is too long (maximum is #{max_length} characters).")
+ end
- it 'shows an error message if the username contains emojis' do
- simulate_input("##{field}", 'Ehsan 🦋')
+ it 'shows an error message if the username contains emojis' do
+ simulate_input("##{field}", 'Ehsan 🦋')
- expect(page).to have_content("Invalid input, please avoid emojis")
+ expect(page).to have_content("Invalid input, please avoid emojis")
+ end
end
end
end
@@ -43,10 +47,6 @@ end
RSpec.describe 'Signup' do
include TermsHelper
- before do
- stub_application_setting(require_admin_approval_after_user_signup: false)
- end
-
let(:new_user) { build_stubbed(:user) }
def fill_in_signup_form
@@ -63,214 +63,309 @@ RSpec.describe 'Signup' do
visit user_confirmation_path(confirmation_token: new_user_token)
end
- describe 'username validation', :js do
+ flag_values = [true, false]
+ flag_values.each do |val|
before do
- visit new_user_registration_path
+ stub_feature_flags(restyle_login_page: val)
+ stub_application_setting(require_admin_approval_after_user_signup: false)
end
- it 'does not show an error border if the username is available' do
- fill_in 'new_user_username', with: 'new-user'
- wait_for_requests
+ describe 'username validation', :js do
+ before do
+ visit new_user_registration_path
+ end
- expect(find('.username')).not_to have_css '.gl-field-error-outline'
- end
+ it 'does not show an error border if the username is available' do
+ fill_in 'new_user_username', with: 'new-user'
+ wait_for_requests
- it 'does not show an error border if the username contains dots (.)' do
- simulate_input('#new_user_username', 'new.user.username')
- wait_for_requests
+ expect(find('.username')).not_to have_css '.gl-field-error-outline'
+ end
- expect(find('.username')).not_to have_css '.gl-field-error-outline'
- end
+ it 'does not show an error border if the username contains dots (.)' do
+ simulate_input('#new_user_username', 'new.user.username')
+ wait_for_requests
- it 'does not show an error border if the username length is not longer than 255 characters' do
- fill_in 'new_user_username', with: 'u' * 255
- wait_for_requests
+ expect(find('.username')).not_to have_css '.gl-field-error-outline'
+ end
- expect(find('.username')).not_to have_css '.gl-field-error-outline'
- end
+ it 'does not show an error border if the username length is not longer than 255 characters' do
+ fill_in 'new_user_username', with: 'u' * 255
+ wait_for_requests
- it 'shows an error border if the username already exists' do
- existing_user = create(:user)
+ expect(find('.username')).not_to have_css '.gl-field-error-outline'
+ end
- fill_in 'new_user_username', with: existing_user.username
- wait_for_requests
+ it 'shows an error border if the username already exists' do
+ existing_user = create(:user)
- expect(find('.username')).to have_css '.gl-field-error-outline'
- end
+ fill_in 'new_user_username', with: existing_user.username
+ wait_for_requests
- it 'shows a success border if the username is available' do
- fill_in 'new_user_username', with: 'new-user'
- wait_for_requests
+ expect(find('.username')).to have_css '.gl-field-error-outline'
+ end
- expect(find('.username')).to have_css '.gl-field-success-outline'
- end
+ it 'shows a success border if the username is available' do
+ fill_in 'new_user_username', with: 'new-user'
+ wait_for_requests
- it 'shows an error border if the username contains special characters' do
- fill_in 'new_user_username', with: 'new$user!username'
- wait_for_requests
+ expect(find('.username')).to have_css '.gl-field-success-outline'
+ end
- expect(find('.username')).to have_css '.gl-field-error-outline'
- end
+ it 'shows an error border if the username contains special characters' do
+ fill_in 'new_user_username', with: 'new$user!username'
+ wait_for_requests
- it 'shows an error border if the username is longer than 255 characters' do
- fill_in 'new_user_username', with: 'u' * 256
- wait_for_requests
+ expect(find('.username')).to have_css '.gl-field-error-outline'
+ end
- expect(find('.username')).to have_css '.gl-field-error-outline'
- end
+ it 'shows an error border if the username is longer than 255 characters' do
+ fill_in 'new_user_username', with: 'u' * 256
+ wait_for_requests
- it 'shows an error message if the username is longer than 255 characters' do
- fill_in 'new_user_username', with: 'u' * 256
- wait_for_requests
+ expect(find('.username')).to have_css '.gl-field-error-outline'
+ end
- expect(page).to have_content("Username is too long (maximum is 255 characters).")
- end
+ it 'shows an error message if the username is longer than 255 characters' do
+ fill_in 'new_user_username', with: 'u' * 256
+ wait_for_requests
- it 'shows an error message if the username is less than 2 characters' do
- fill_in 'new_user_username', with: 'u'
- wait_for_requests
+ expect(page).to have_content("Username is too long (maximum is 255 characters).")
+ end
- expect(page).to have_content("Username is too short (minimum is 2 characters).")
- end
+ it 'shows an error message if the username is less than 2 characters' do
+ fill_in 'new_user_username', with: 'u'
+ wait_for_requests
- it 'shows an error message on submit if the username contains special characters' do
- fill_in 'new_user_username', with: 'new$user!username'
- wait_for_requests
+ expect(page).to have_content("Username is too short (minimum is 2 characters).")
+ end
- click_button "Register"
+ it 'shows an error message on submit if the username contains special characters' do
+ fill_in 'new_user_username', with: 'new$user!username'
+ wait_for_requests
- expect(page).to have_content("Please create a username with only alphanumeric characters.")
- end
+ click_button "Register"
- it 'shows an error border if the username contains emojis' do
- simulate_input('#new_user_username', 'ehsan😀')
+ expect(page).to have_content("Please create a username with only alphanumeric characters.")
+ end
- expect(find('.username')).to have_css '.gl-field-error-outline'
- end
+ it 'shows an error border if the username contains emojis' do
+ simulate_input('#new_user_username', 'ehsan😀')
- it 'shows an error message if the username contains emojis' do
- simulate_input('#new_user_username', 'ehsan😀')
+ expect(find('.username')).to have_css '.gl-field-error-outline'
+ end
- expect(page).to have_content("Invalid input, please avoid emojis")
- end
+ it 'shows an error message if the username contains emojis' do
+ simulate_input('#new_user_username', 'ehsan😀')
- it 'shows a pending message if the username availability is being fetched', quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/31484' do
- fill_in 'new_user_username', with: 'new-user'
+ expect(page).to have_content("Invalid input, please avoid emojis")
+ end
- expect(find('.username > .validation-pending')).not_to have_css '.hide'
- end
+ it 'shows a pending message if the username availability is being fetched', quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/31484' do
+ fill_in 'new_user_username', with: 'new-user'
- it 'shows a success message if the username is available' do
- fill_in 'new_user_username', with: 'new-user'
- wait_for_requests
+ expect(find('.username > .validation-pending')).not_to have_css '.hide'
+ end
- expect(find('.username > .validation-success')).not_to have_css '.hide'
- end
+ it 'shows a success message if the username is available' do
+ fill_in 'new_user_username', with: 'new-user'
+ wait_for_requests
- it 'shows an error message if the username is unavailable' do
- existing_user = create(:user)
+ expect(find('.username > .validation-success')).not_to have_css '.hide'
+ end
- fill_in 'new_user_username', with: existing_user.username
- wait_for_requests
+ it 'shows an error message if the username is unavailable' do
+ existing_user = create(:user)
- expect(find('.username > .validation-error')).not_to have_css '.hide'
- end
+ fill_in 'new_user_username', with: existing_user.username
+ wait_for_requests
- it 'shows a success message if the username is corrected and then available' do
- fill_in 'new_user_username', with: 'new-user$'
- wait_for_requests
- fill_in 'new_user_username', with: 'new-user'
- wait_for_requests
+ expect(find('.username > .validation-error')).not_to have_css '.hide'
+ end
- expect(page).to have_content("Username is available.")
+ it 'shows a success message if the username is corrected and then available' do
+ fill_in 'new_user_username', with: 'new-user$'
+ wait_for_requests
+ fill_in 'new_user_username', with: 'new-user'
+ wait_for_requests
+
+ expect(page).to have_content("Username is available.")
+ end
end
- end
- context 'with no errors' do
- context 'when sending confirmation email' do
- before do
- stub_application_setting(send_user_confirmation_email: true)
+ context 'with no errors' do
+ context 'when sending confirmation email' do
+ before do
+ stub_application_setting(send_user_confirmation_email: true)
+ end
+
+ context 'when soft email confirmation is not enabled' do
+ before do
+ stub_feature_flags(soft_email_confirmation: false)
+ end
+
+ it 'creates the user account and sends a confirmation email, and pre-fills email address after confirming' do
+ visit new_user_registration_path
+
+ fill_in_signup_form
+
+ expect { click_button 'Register' }.to change { User.count }.by(1)
+ expect(page).to have_current_path users_almost_there_path, ignore_query: true
+ expect(page).to have_content("Please check your email (#{new_user.email}) to confirm your account")
+
+ confirm_email
+
+ expect(find_field('Username or email').value).to eq(new_user.email)
+ end
+ end
+
+ context 'when soft email confirmation is enabled' do
+ before do
+ stub_feature_flags(soft_email_confirmation: true)
+ end
+
+ it 'creates the user account and sends a confirmation email' do
+ visit new_user_registration_path
+
+ fill_in_signup_form
+
+ expect { click_button 'Register' }.to change { User.count }.by(1)
+ expect(page).to have_current_path users_sign_up_welcome_path, ignore_query: true
+ end
+ end
end
- context 'when soft email confirmation is not enabled' do
+ context "when not sending confirmation email" do
before do
- stub_feature_flags(soft_email_confirmation: false)
+ stub_application_setting(send_user_confirmation_email: false)
end
- it 'creates the user account and sends a confirmation email, and pre-fills email address after confirming' do
+ it 'creates the user account and goes to dashboard' do
visit new_user_registration_path
fill_in_signup_form
+ click_button "Register"
- expect { click_button 'Register' }.to change { User.count }.by(1)
- expect(page).to have_current_path users_almost_there_path, ignore_query: true
- expect(page).to have_content("Please check your email (#{new_user.email}) to confirm your account")
-
- confirm_email
-
- expect(find_field('Username or email').value).to eq(new_user.email)
+ expect(page).to have_current_path users_sign_up_welcome_path, ignore_query: true
end
end
- context 'when soft email confirmation is enabled' do
+ context 'with required admin approval enabled' do
before do
- stub_feature_flags(soft_email_confirmation: true)
+ stub_application_setting(require_admin_approval_after_user_signup: true)
end
- it 'creates the user account and sends a confirmation email' do
+ it 'creates the user but does not sign them in' do
visit new_user_registration_path
fill_in_signup_form
expect { click_button 'Register' }.to change { User.count }.by(1)
- expect(page).to have_current_path users_sign_up_welcome_path, ignore_query: true
+ expect(page).to have_current_path new_user_session_path, ignore_query: true
+ expect(page).to have_content("You have signed up successfully. However, we could not sign you in because your account is awaiting approval from your GitLab administrator")
end
end
end
- context "when not sending confirmation email" do
- before do
- stub_application_setting(send_user_confirmation_email: false)
+ context 'with errors' do
+ it "displays the errors" do
+ create(:user, email: new_user.email)
+ visit new_user_registration_path
+
+ fill_in_signup_form
+ click_button "Register"
+
+ expect(page).to have_current_path user_registration_path, ignore_query: true
+ expect(page).to have_content("error prohibited this user from being saved")
+ expect(page).to have_content("Email has already been taken")
end
- it 'creates the user account and goes to dashboard' do
+ it 'does not redisplay the password' do
+ create(:user, email: new_user.email)
visit new_user_registration_path
fill_in_signup_form
click_button "Register"
- expect(page).to have_current_path users_sign_up_welcome_path, ignore_query: true
+ expect(page).to have_current_path user_registration_path, ignore_query: true
+ expect(page.body).not_to match(/#{new_user.password}/)
end
end
- context 'with required admin approval enabled' do
+ context 'when terms are enforced' do
before do
- stub_application_setting(require_admin_approval_after_user_signup: true)
+ enforce_terms
end
- it 'creates the user but does not sign them in' do
+ it 'renders text that the user confirms terms by clicking register' do
visit new_user_registration_path
+ expect(page).to have_content(/By clicking Register, I agree that I have read and accepted the Terms of Use and Privacy Policy/)
+
fill_in_signup_form
+ click_button 'Register'
- expect { click_button 'Register' }.to change { User.count }.by(1)
- expect(page).to have_current_path new_user_session_path, ignore_query: true
- expect(page).to have_content("You have signed up successfully. However, we could not sign you in because your account is awaiting approval from your GitLab administrator")
+ expect(page).to have_current_path users_sign_up_welcome_path, ignore_query: true
end
end
- end
- context 'with errors' do
- it "displays the errors" do
- create(:user, email: new_user.email)
+ context 'when reCAPTCHA and invisible captcha are enabled' do
+ before do
+ stub_application_setting(invisible_captcha_enabled: true)
+ stub_application_setting(recaptcha_enabled: true)
+ allow_next_instance_of(RegistrationsController) do |instance|
+ allow(instance).to receive(:verify_recaptcha).and_return(true)
+ end
+ end
+
+ context 'when reCAPTCHA detects malicious behaviour' do
+ before do
+ allow_next_instance_of(RegistrationsController) do |instance|
+ allow(instance).to receive(:verify_recaptcha).and_return(false)
+ end
+ end
+
+ it 'prevents from signing up' do
+ visit new_user_registration_path
+
+ fill_in_signup_form
+
+ expect { click_button 'Register' }.not_to change { User.count }
+ expect(page).to have_content(_('There was an error with the reCAPTCHA. Please solve the reCAPTCHA again.'))
+ end
+ end
+
+ context 'when invisible captcha detects malicious behaviour' do
+ it 'prevents from signing up' do
+ visit new_user_registration_path
+
+ fill_in_signup_form
+
+ expect { click_button 'Register' }.not_to change { User.count }
+ expect(page).to have_content('That was a bit too quick! Please resubmit.')
+ end
+ end
+ end
+
+ it 'redirects to step 2 of the signup process, sets the role and redirects back' do
+ stub_feature_flags(about_your_company_registration_flow: false)
visit new_user_registration_path
fill_in_signup_form
- click_button "Register"
+ click_button 'Register'
- expect(page).to have_current_path user_registration_path, ignore_query: true
- expect(page).to have_content("error prohibited this user from being saved")
- expect(page).to have_content("Email has already been taken")
+ visit new_project_path
+
+ expect(page).to have_current_path(users_sign_up_welcome_path)
+
+ select 'Software Developer', from: 'user_role'
+ click_button 'Get started!'
+
+ created_user = User.find_by_username(new_user.username)
+
+ expect(created_user.software_developer_role?).to be_truthy
+ expect(created_user.setup_for_company).to be_nil
+ expect(page).to have_current_path(new_project_path)
end
it 'does not redisplay the password' do
@@ -283,6 +378,12 @@ RSpec.describe 'Signup' do
expect(page).to have_current_path user_registration_path, ignore_query: true
expect(page.body).not_to match(/#{new_user.password}/)
end
+
+ context 'with invalid email', :saas, :js do
+ it_behaves_like 'user email validation' do
+ let(:path) { new_user_registration_path }
+ end
+ end
end
context 'when terms are enforced' do
@@ -298,69 +399,21 @@ RSpec.describe 'Signup' do
fill_in_signup_form
click_button 'Register'
- expect(page).to have_current_path users_sign_up_welcome_path, ignore_query: true
- end
- end
-
- context 'when reCAPTCHA and invisible captcha are enabled' do
- before do
- stub_application_setting(invisible_captcha_enabled: true)
- stub_application_setting(recaptcha_enabled: true)
- allow_next_instance_of(RegistrationsController) do |instance|
- allow(instance).to receive(:verify_recaptcha).and_return(true)
- end
- end
-
- context 'when reCAPTCHA detects malicious behaviour' do
- before do
- allow_next_instance_of(RegistrationsController) do |instance|
- allow(instance).to receive(:verify_recaptcha).and_return(false)
- end
- end
-
- it 'prevents from signing up' do
- visit new_user_registration_path
+ visit new_project_path
- fill_in_signup_form
+ expect(page).to have_current_path(users_sign_up_welcome_path)
- expect { click_button 'Register' }.not_to change { User.count }
- expect(page).to have_content(_('There was an error with the reCAPTCHA. Please solve the reCAPTCHA again.'))
- end
- end
-
- context 'when invisible captcha detects malicious behaviour' do
- it 'prevents from signing up' do
- visit new_user_registration_path
+ select 'Software Developer', from: 'user_role'
+ click_button 'Get started!'
- fill_in_signup_form
+ created_user = User.find_by_username(new_user.username)
- expect { click_button 'Register' }.not_to change { User.count }
- expect(page).to have_content('That was a bit too quick! Please resubmit.')
- end
+ expect(created_user.software_developer_role?).to be_truthy
+ expect(created_user.setup_for_company).to be_nil
+ expect(page).to have_current_path(new_project_path)
end
- end
-
- it 'redirects to step 2 of the signup process, sets the role and redirects back' do
- stub_feature_flags(about_your_company_registration_flow: false)
- visit new_user_registration_path
-
- fill_in_signup_form
- click_button 'Register'
-
- visit new_project_path
- expect(page).to have_current_path(users_sign_up_welcome_path)
-
- select 'Software Developer', from: 'user_role'
- click_button 'Get started!'
-
- created_user = User.find_by_username(new_user.username)
-
- expect(created_user.software_developer_role?).to be_truthy
- expect(created_user.setup_for_company).to be_nil
- expect(page).to have_current_path(new_project_path)
+ it_behaves_like 'Signup name validation', 'new_user_first_name', 127, 'First name'
+ it_behaves_like 'Signup name validation', 'new_user_last_name', 127, 'Last name'
end
-
- it_behaves_like 'Signup name validation', 'new_user_first_name', 127, 'First name'
- it_behaves_like 'Signup name validation', 'new_user_last_name', 127, 'Last name'
end
diff --git a/spec/finders/autocomplete/deploy_keys_with_write_access_finder_spec.rb b/spec/finders/autocomplete/deploy_keys_with_write_access_finder_spec.rb
new file mode 100644
index 00000000000..ed3b1d2d0bf
--- /dev/null
+++ b/spec/finders/autocomplete/deploy_keys_with_write_access_finder_spec.rb
@@ -0,0 +1,53 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Autocomplete::DeployKeysWithWriteAccessFinder do
+ let_it_be(:user) { create(:user) }
+
+ let(:finder) { described_class.new(user, project) }
+
+ describe '#execute' do
+ subject(:execute) { finder.execute }
+
+ context 'when project is missing' do
+ let(:project) { nil }
+
+ it 'returns an empty ActiveRecord::Relation' do
+ expect(execute).to eq(DeployKey.none)
+ end
+ end
+
+ context 'when project is present' do
+ let_it_be(:project) { create(:project, :public) }
+
+ context 'and current user cannot admin project' do
+ it 'raises Gitlab::Access::AccessDeniedError' do
+ expect { execute }.to raise_error(Gitlab::Access::AccessDeniedError)
+ end
+ end
+
+ context 'and current user can admin project' do
+ before do
+ project.add_maintainer(user)
+ end
+
+ context 'when deploy key does not have write access to project' do
+ let(:deploy_key_project) { create(:deploy_keys_project, project: project) }
+
+ it 'returns an empty ActiveRecord::Relation' do
+ expect(execute).to eq(DeployKey.none)
+ end
+ end
+
+ context 'when deploy key has write access to project' do
+ let(:deploy_key_project) { create(:deploy_keys_project, :write_access, project: project) }
+
+ it 'returns the deploy keys' do
+ expect(execute).to match_array([deploy_key_project.deploy_key])
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/spec/finders/ci/runners_finder_spec.rb b/spec/finders/ci/runners_finder_spec.rb
index aeab5a51766..96412c1e371 100644
--- a/spec/finders/ci/runners_finder_spec.rb
+++ b/spec/finders/ci/runners_finder_spec.rb
@@ -92,10 +92,8 @@ RSpec.describe Ci::RunnersFinder do
context 'set to an invalid value' do
let(:upgrade_status) { :some_invalid_status }
- it 'does not call with_upgrade_status' do
- expect(Ci::Runner).not_to receive(:with_upgrade_status)
-
- expect(execute).to match_array(Ci::Runner.all)
+ it 'raises ArgumentError' do
+ expect { execute }.to raise_error(ArgumentError)
end
end
diff --git a/spec/finders/clusters/knative_services_finder_spec.rb b/spec/finders/clusters/knative_services_finder_spec.rb
index c61fac27bd9..6f5a4db10e8 100644
--- a/spec/finders/clusters/knative_services_finder_spec.rb
+++ b/spec/finders/clusters/knative_services_finder_spec.rb
@@ -88,6 +88,7 @@ RSpec.describe Clusters::KnativeServicesFinder do
end
it { is_expected.to be_truthy }
+
it "discovers knative installation" do
expect { subject }
.to change { finder.cluster.kubeclient.knative_client.discovered }
@@ -102,6 +103,7 @@ RSpec.describe Clusters::KnativeServicesFinder do
end
it { is_expected.to be_falsy }
+
it "does not discover knative installation" do
expect { subject }.not_to change { cluster.kubeclient.knative_client.discovered }
end
diff --git a/spec/finders/concerns/finder_with_cross_project_access_spec.rb b/spec/finders/concerns/finder_with_cross_project_access_spec.rb
index 0798528c200..a4b483a8e5e 100644
--- a/spec/finders/concerns/finder_with_cross_project_access_spec.rb
+++ b/spec/finders/concerns/finder_with_cross_project_access_spec.rb
@@ -97,7 +97,11 @@ RSpec.describe FinderWithCrossProjectAccess do
end
it 're-enables the check after the find failed' do
- finder.find(non_existing_record_id) rescue ActiveRecord::RecordNotFound
+ begin
+ finder.find(non_existing_record_id)
+ rescue ActiveRecord::RecordNotFound
+ nil
+ end
expect(finder.instance_variable_get(:@should_skip_cross_project_check))
.to eq(false)
diff --git a/spec/finders/crm/contacts_finder_spec.rb b/spec/finders/crm/contacts_finder_spec.rb
index dd5274a0574..43dcced53fd 100644
--- a/spec/finders/crm/contacts_finder_spec.rb
+++ b/spec/finders/crm/contacts_finder_spec.rb
@@ -141,6 +141,67 @@ RSpec.describe Crm::ContactsFinder do
expect(finder.execute).to match_array([search_test_b])
end
end
+
+ context 'when sorting' do
+ let_it_be(:search_test_c) do
+ create(
+ :contact,
+ group: search_test_group,
+ email: "a@test.com",
+ organization: create(:organization, name: "Company Z")
+ )
+ end
+
+ let_it_be(:search_test_d) do
+ create(
+ :contact,
+ group: search_test_group,
+ email: "b@test.com",
+ organization: create(:organization, name: "Company A")
+ )
+ end
+
+ it 'returns the contacts sorted by email in ascending order' do
+ finder = described_class.new(user, group: search_test_group, sort: { field: 'email', direction: :asc })
+
+ expect(finder.execute).to eq([search_test_c, search_test_d, search_test_a, search_test_b])
+ end
+
+ it 'returns the contacts sorted by description in descending order' do
+ finder = described_class.new(user, group: search_test_group, sort: { field: 'description', direction: :desc })
+
+ results = finder.execute
+
+ expect(results[0]).to eq(search_test_b)
+ expect(results[1]).to eq(search_test_a)
+ end
+
+ it 'returns the contacts sorted by organization in ascending order' do
+ finder = described_class.new(user, group: search_test_group, sort: { field: 'organization', direction: :asc })
+
+ results = finder.execute
+
+ expect(results[0]).to eq(search_test_d)
+ expect(results[1]).to eq(search_test_c)
+ end
+ end
+ end
+ end
+
+ describe '.counts_by_state' do
+ let_it_be(:group) { create(:group, :crm_enabled) }
+ let_it_be(:active_contacts) { create_list(:contact, 3, group: group, state: :active) }
+ let_it_be(:inactive_contacts) { create_list(:contact, 2, group: group, state: :inactive) }
+
+ before do
+ group.add_developer(user)
+ end
+
+ it 'returns correct counts' do
+ counts = described_class.counts_by_state(user, group: group)
+
+ expect(counts["active"]).to eq(3)
+ expect(counts["inactive"]).to eq(2)
end
end
end
diff --git a/spec/finders/fork_targets_finder_spec.rb b/spec/finders/fork_targets_finder_spec.rb
index fe5b50ef030..1acc38bb492 100644
--- a/spec/finders/fork_targets_finder_spec.rb
+++ b/spec/finders/fork_targets_finder_spec.rb
@@ -5,27 +5,27 @@ require 'spec_helper'
RSpec.describe ForkTargetsFinder do
subject(:finder) { described_class.new(project, user) }
- let(:project) { create(:project, namespace: create(:group)) }
- let(:user) { create(:user) }
- let!(:maintained_group) do
+ let_it_be(:project) { create(:project, namespace: create(:group)) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:maintained_group) do
create(:group).tap { |g| g.add_maintainer(user) }
end
- let!(:owned_group) do
+ let_it_be(:owned_group) do
create(:group).tap { |g| g.add_owner(user) }
end
- let!(:developer_group) do
+ let_it_be(:developer_group) do
create(:group, project_creation_level: ::Gitlab::Access::DEVELOPER_MAINTAINER_PROJECT_ACCESS).tap do |g|
g.add_developer(user)
end
end
- let!(:reporter_group) do
+ let_it_be(:reporter_group) do
create(:group).tap { |g| g.add_reporter(user) }
end
- let!(:guest_group) do
+ let_it_be(:guest_group) do
create(:group).tap { |g| g.add_guest(user) }
end
@@ -33,7 +33,7 @@ RSpec.describe ForkTargetsFinder do
project.namespace.add_owner(user)
end
- describe '#execute' do
+ shared_examples 'returns namespaces and groups' do
it 'returns all user manageable namespaces' do
expect(finder.execute).to match_array([user.namespace, maintained_group, owned_group, project.namespace, developer_group])
end
@@ -46,4 +46,28 @@ RSpec.describe ForkTargetsFinder do
expect(finder.execute(only_groups: true)).to include(a_kind_of(Group))
end
end
+
+ describe '#execute' do
+ it_behaves_like 'returns namespaces and groups'
+
+ context 'when search is provided' do
+ it 'filters the targets by the param' do
+ expect(finder.execute(search: maintained_group.path)).to eq([maintained_group])
+ end
+ end
+
+ context 'when searchable_fork_targets feature flag is disabled' do
+ before do
+ stub_feature_flags(searchable_fork_targets: false)
+ end
+
+ it_behaves_like 'returns namespaces and groups'
+
+ context 'when search is provided' do
+ it 'ignores the param and returns all user manageable namespaces' do
+ expect(finder.execute).to match_array([user.namespace, maintained_group, owned_group, project.namespace, developer_group])
+ end
+ end
+ end
+ end
end
diff --git a/spec/finders/groups/accepting_project_transfers_finder_spec.rb b/spec/finders/groups/accepting_project_transfers_finder_spec.rb
new file mode 100644
index 00000000000..e73318c763f
--- /dev/null
+++ b/spec/finders/groups/accepting_project_transfers_finder_spec.rb
@@ -0,0 +1,63 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Groups::AcceptingProjectTransfersFinder do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:group_where_direct_owner) { create(:group) }
+ let_it_be(:subgroup_of_group_where_direct_owner) { create(:group, parent: group_where_direct_owner) }
+ let_it_be(:group_where_direct_maintainer) { create(:group) }
+ let_it_be(:group_where_direct_maintainer_but_cant_create_projects) do
+ create(:group, project_creation_level: Gitlab::Access::NO_ONE_PROJECT_ACCESS)
+ end
+
+ let_it_be(:group_where_direct_developer) { create(:group) }
+ let_it_be(:shared_with_group_where_direct_owner_as_owner) { create(:group) }
+ let_it_be(:shared_with_group_where_direct_owner_as_guest) { create(:group) }
+ let_it_be(:shared_with_group_where_direct_owner_as_maintainer) { create(:group) }
+ let_it_be(:shared_with_group_where_direct_developer_as_owner) { create(:group) }
+ let_it_be(:subgroup_of_shared_with_group_where_direct_owner_as_maintainer) do
+ create(:group, parent: shared_with_group_where_direct_owner_as_maintainer)
+ end
+
+ before do
+ group_where_direct_owner.add_owner(user)
+ group_where_direct_maintainer.add_maintainer(user)
+ group_where_direct_developer.add_developer(user)
+
+ create(:group_group_link, :owner,
+ shared_with_group: group_where_direct_owner,
+ shared_group: shared_with_group_where_direct_owner_as_owner
+ )
+
+ create(:group_group_link, :guest,
+ shared_with_group: group_where_direct_owner,
+ shared_group: shared_with_group_where_direct_owner_as_guest
+ )
+
+ create(:group_group_link, :maintainer,
+ shared_with_group: group_where_direct_owner,
+ shared_group: shared_with_group_where_direct_owner_as_maintainer
+ )
+
+ create(:group_group_link, :owner,
+ shared_with_group: group_where_direct_developer,
+ shared_group: shared_with_group_where_direct_developer_as_owner
+ )
+ end
+
+ describe '#execute' do
+ subject(:result) { described_class.new(user).execute }
+
+ it 'only returns groups where the user has access to transfer projects to' do
+ expect(result).to match_array([
+ group_where_direct_owner,
+ subgroup_of_group_where_direct_owner,
+ group_where_direct_maintainer,
+ shared_with_group_where_direct_owner_as_owner,
+ shared_with_group_where_direct_owner_as_maintainer,
+ subgroup_of_shared_with_group_where_direct_owner_as_maintainer
+ ])
+ end
+ end
+end
diff --git a/spec/finders/groups/user_groups_finder_spec.rb b/spec/finders/groups/user_groups_finder_spec.rb
index 9339741da79..999079468e5 100644
--- a/spec/finders/groups/user_groups_finder_spec.rb
+++ b/spec/finders/groups/user_groups_finder_spec.rb
@@ -5,17 +5,19 @@ require 'spec_helper'
RSpec.describe Groups::UserGroupsFinder do
describe '#execute' do
let_it_be(:user) { create(:user) }
+ let_it_be(:root_group) { create(:group, name: 'Root group', path: 'root-group') }
let_it_be(:guest_group) { create(:group, name: 'public guest', path: 'public-guest') }
- let_it_be(:private_maintainer_group) { create(:group, :private, name: 'b private maintainer', path: 'b-private-maintainer') }
- let_it_be(:public_developer_group) { create(:group, project_creation_level: nil, name: 'c public developer', path: 'c-public-developer') }
- let_it_be(:public_maintainer_group) { create(:group, name: 'a public maintainer', path: 'a-public-maintainer') }
+ let_it_be(:private_maintainer_group) { create(:group, :private, name: 'b private maintainer', path: 'b-private-maintainer', parent: root_group) }
+ let_it_be(:public_developer_group) { create(:group, project_creation_level: nil, name: 'c public developer', path: 'c-public-developer', parent: root_group) }
+ let_it_be(:public_maintainer_group) { create(:group, name: 'a public maintainer', path: 'a-public-maintainer', parent: root_group) }
let_it_be(:public_owner_group) { create(:group, name: 'a public owner', path: 'a-public-owner') }
- subject { described_class.new(current_user, target_user, arguments).execute }
+ subject { described_class.new(current_user, target_user, arguments.merge(search_arguments)).execute }
let(:arguments) { {} }
let(:current_user) { user }
let(:target_user) { user }
+ let(:search_arguments) { {} }
before_all do
guest_group.add_guest(user)
@@ -25,15 +27,40 @@ RSpec.describe Groups::UserGroupsFinder do
public_owner_group.add_owner(user)
end
- it 'returns all groups where the user is a direct member' do
- is_expected.to match(
- [
+ shared_examples 'user group finder searching by name or path' do
+ let(:search_arguments) { { search: 'maintainer' } }
+
+ specify do
+ is_expected.to contain_exactly(
public_maintainer_group,
- public_owner_group,
- private_maintainer_group,
- public_developer_group,
- guest_group
- ]
+ private_maintainer_group
+ )
+ end
+
+ context 'when searching for a full path (including parent)' do
+ let(:search_arguments) { { search: 'root-group/b-private-maintainer' } }
+
+ specify do
+ is_expected.to contain_exactly(private_maintainer_group)
+ end
+ end
+
+ context 'when search keywords include the parent route' do
+ let(:search_arguments) { { search: 'root public' } }
+
+ specify do
+ is_expected.to match(keyword_search_expected_groups)
+ end
+ end
+ end
+
+ it 'returns all groups where the user is a direct member' do
+ is_expected.to contain_exactly(
+ public_maintainer_group,
+ public_owner_group,
+ private_maintainer_group,
+ public_developer_group,
+ guest_group
)
end
@@ -53,26 +80,20 @@ RSpec.describe Groups::UserGroupsFinder do
let(:arguments) { { permission_scope: :create_projects } }
specify do
- is_expected.to match(
+ is_expected.to contain_exactly(
+ public_maintainer_group,
+ public_owner_group,
+ private_maintainer_group,
+ public_developer_group
+ )
+ end
+
+ it_behaves_like 'user group finder searching by name or path' do
+ let(:keyword_search_expected_groups) do
[
public_maintainer_group,
- public_owner_group,
- private_maintainer_group,
public_developer_group
]
- )
- end
-
- context 'when search is provided' do
- let(:arguments) { { permission_scope: :create_projects, search: 'maintainer' } }
-
- specify do
- is_expected.to match(
- [
- public_maintainer_group,
- private_maintainer_group
- ]
- )
end
end
end
@@ -81,38 +102,15 @@ RSpec.describe Groups::UserGroupsFinder do
let(:arguments) { { permission_scope: :transfer_projects } }
specify do
- is_expected.to match(
- [
- public_maintainer_group,
- public_owner_group,
- private_maintainer_group
- ]
+ is_expected.to contain_exactly(
+ public_maintainer_group,
+ public_owner_group,
+ private_maintainer_group
)
end
- context 'when search is provided' do
- let(:arguments) { { permission_scope: :transfer_projects, search: 'owner' } }
-
- specify do
- is_expected.to match(
- [
- public_owner_group
- ]
- )
- end
- end
- end
-
- context 'when search is provided' do
- let(:arguments) { { search: 'maintainer' } }
-
- specify do
- is_expected.to match(
- [
- public_maintainer_group,
- private_maintainer_group
- ]
- )
+ it_behaves_like 'user group finder searching by name or path' do
+ let(:keyword_search_expected_groups) { [public_maintainer_group] }
end
end
end
diff --git a/spec/finders/projects/topics_finder_spec.rb b/spec/finders/projects/topics_finder_spec.rb
index 3812f0757bc..d6e90e5958f 100644
--- a/spec/finders/projects/topics_finder_spec.rb
+++ b/spec/finders/projects/topics_finder_spec.rb
@@ -5,13 +5,13 @@ require 'spec_helper'
RSpec.describe Projects::TopicsFinder do
let_it_be(:user) { create(:user) }
- let!(:topic1) { create(:topic, name: 'topicB') }
- let!(:topic2) { create(:topic, name: 'topicC') }
- let!(:topic3) { create(:topic, name: 'topicA') }
+ let_it_be(:topic1) { create(:topic, name: 'topicB') }
+ let_it_be(:topic2) { create(:topic, name: 'topicC') }
+ let_it_be(:topic3) { create(:topic, name: 'topicA') }
- let!(:project1) { create(:project, :public, namespace: user.namespace, topic_list: 'topicC, topicA, topicB') }
- let!(:project2) { create(:project, :public, namespace: user.namespace, topic_list: 'topicC, topicA') }
- let!(:project3) { create(:project, :public, namespace: user.namespace, topic_list: 'topicC') }
+ let_it_be(:project1) { create(:project, :public, namespace: user.namespace, topic_list: 'topicC, topicA, topicB') }
+ let_it_be(:project2) { create(:project, :public, namespace: user.namespace, topic_list: 'topicC, topicA') }
+ let_it_be(:project3) { create(:project, :public, namespace: user.namespace, topic_list: 'topicC') }
describe '#execute' do
it 'returns topics' do
@@ -41,5 +41,21 @@ RSpec.describe Projects::TopicsFinder do
end
end
end
+
+ context 'filter by without_projects' do
+ let_it_be(:topic4) { create(:topic, name: 'unassigned topic') }
+
+ it 'returns topics without assigned projects' do
+ topics = described_class.new(params: { without_projects: true }).execute
+
+ expect(topics).to contain_exactly(topic4)
+ end
+
+ it 'returns all topics when without_projects is false' do
+ topics = described_class.new(params: { without_projects: false }).execute
+
+ expect(topics).to contain_exactly(topic1, topic2, topic3, topic4)
+ end
+ end
end
end
diff --git a/spec/finders/tags_finder_spec.rb b/spec/finders/tags_finder_spec.rb
index 70d79ced81d..0bf9b228c8a 100644
--- a/spec/finders/tags_finder_spec.rb
+++ b/spec/finders/tags_finder_spec.rb
@@ -175,6 +175,16 @@ RSpec.describe TagsFinder do
end
end
end
+
+ context 'pagination and search' do
+ let(:params) { { search: '1.1.1', per_page: 1 } }
+
+ it 'ignores the pagination for search' do
+ result = subject
+
+ expect(result.map(&:name)).to eq(%w(v1.1.1))
+ end
+ end
end
context 'when Gitaly is unavailable' do
diff --git a/spec/fixtures/api/schemas/entities/discussion.json b/spec/fixtures/api/schemas/entities/discussion.json
index efc31a4f833..da2d2a83a8d 100644
--- a/spec/fixtures/api/schemas/entities/discussion.json
+++ b/spec/fixtures/api/schemas/entities/discussion.json
@@ -67,7 +67,8 @@
"toggle_award_path": { "type": "string" },
"path": { "type": "string" },
"commands_changes": { "type": "object", "additionalProperties": true },
- "confidential": { "type": ["boolean", "null"] }
+ "confidential": { "type": ["boolean", "null"] },
+ "internal": { "type": ["boolean", "null"] }
},
"required": [
"id", "attachment", "author", "created_at", "updated_at",
diff --git a/spec/fixtures/api/schemas/external_validation.json b/spec/fixtures/api/schemas/external_validation.json
index ddcabd4c61e..4a2538a020e 100644
--- a/spec/fixtures/api/schemas/external_validation.json
+++ b/spec/fixtures/api/schemas/external_validation.json
@@ -4,7 +4,8 @@
"project",
"user",
"pipeline",
- "builds"
+ "builds",
+ "total_builds_count"
],
"properties" : {
"project": {
@@ -80,6 +81,7 @@
}
}
}
- }
+ },
+ "total_builds_count": { "type": "integer" }
}
}
diff --git a/spec/fixtures/api/schemas/public_api/v4/notes.json b/spec/fixtures/api/schemas/public_api/v4/notes.json
index 25e8f9cbed6..d6d0300a64f 100644
--- a/spec/fixtures/api/schemas/public_api/v4/notes.json
+++ b/spec/fixtures/api/schemas/public_api/v4/notes.json
@@ -32,7 +32,8 @@
"resolvable": { "type": "boolean" },
"resolved_by": { "type": ["string", "null"] },
"resolved_at": { "type": ["string", "null"] },
- "confidential": { "type": ["boolean", "null"] }
+ "confidential": { "type": ["boolean", "null"] },
+ "internal": { "type": ["boolean", "null"] }
},
"required": [
"id", "body", "attachment", "author", "created_at", "updated_at",
diff --git a/spec/fixtures/gitlab/import_export/corrupted_project_export.tar.gz b/spec/fixtures/gitlab/import_export/corrupted_project_export.tar.gz
index e99136e96b7..d6632c5121a 100644
--- a/spec/fixtures/gitlab/import_export/corrupted_project_export.tar.gz
+++ b/spec/fixtures/gitlab/import_export/corrupted_project_export.tar.gz
Binary files differ
diff --git a/spec/fixtures/lib/gitlab/ci/build/artifacts/adapters/zip_stream/100_files.zip b/spec/fixtures/lib/gitlab/ci/build/artifacts/adapters/zip_stream/100_files.zip
new file mode 100644
index 00000000000..31124abc0e5
--- /dev/null
+++ b/spec/fixtures/lib/gitlab/ci/build/artifacts/adapters/zip_stream/100_files.zip
Binary files differ
diff --git a/spec/fixtures/lib/gitlab/ci/build/artifacts/adapters/zip_stream/200_mb_decompressed.zip b/spec/fixtures/lib/gitlab/ci/build/artifacts/adapters/zip_stream/200_mb_decompressed.zip
new file mode 100644
index 00000000000..8c56cce641a
--- /dev/null
+++ b/spec/fixtures/lib/gitlab/ci/build/artifacts/adapters/zip_stream/200_mb_decompressed.zip
Binary files differ
diff --git a/spec/fixtures/lib/gitlab/ci/build/artifacts/adapters/zip_stream/multiple_files.zip b/spec/fixtures/lib/gitlab/ci/build/artifacts/adapters/zip_stream/multiple_files.zip
new file mode 100644
index 00000000000..09ac4e5df51
--- /dev/null
+++ b/spec/fixtures/lib/gitlab/ci/build/artifacts/adapters/zip_stream/multiple_files.zip
Binary files differ
diff --git a/spec/fixtures/lib/gitlab/ci/build/artifacts/adapters/zip_stream/single_file.zip b/spec/fixtures/lib/gitlab/ci/build/artifacts/adapters/zip_stream/single_file.zip
new file mode 100644
index 00000000000..81768a9f2b3
--- /dev/null
+++ b/spec/fixtures/lib/gitlab/ci/build/artifacts/adapters/zip_stream/single_file.zip
Binary files differ
diff --git a/spec/fixtures/lib/gitlab/ci/build/artifacts/adapters/zip_stream/with_directory.zip b/spec/fixtures/lib/gitlab/ci/build/artifacts/adapters/zip_stream/with_directory.zip
new file mode 100644
index 00000000000..6de321ea86a
--- /dev/null
+++ b/spec/fixtures/lib/gitlab/ci/build/artifacts/adapters/zip_stream/with_directory.zip
Binary files differ
diff --git a/spec/fixtures/lib/gitlab/ci/build/artifacts/adapters/zip_stream/zipbomb.zip b/spec/fixtures/lib/gitlab/ci/build/artifacts/adapters/zip_stream/zipbomb.zip
new file mode 100644
index 00000000000..b8cfcef9739
--- /dev/null
+++ b/spec/fixtures/lib/gitlab/ci/build/artifacts/adapters/zip_stream/zipbomb.zip
Binary files differ
diff --git a/spec/fixtures/lib/gitlab/import_export/complex/project.json b/spec/fixtures/lib/gitlab/import_export/complex/project.json
index 12dbabf833b..5bcf6521471 100644
--- a/spec/fixtures/lib/gitlab/import_export/complex/project.json
+++ b/spec/fixtures/lib/gitlab/import_export/complex/project.json
@@ -3149,6 +3149,72 @@
"created_at": "2020-01-07T11:21:21.235Z",
"updated_at": "2020-01-07T11:21:21.235Z"
}
+ ],
+ "merge_request_assignees": [
+ {
+ "user_id": 1,
+ "created_at": "2020-01-07T11:21:21.235Z",
+ "state": "unreviewed"
+ },
+ {
+ "user_id": 15,
+ "created_at": "2020-01-08T11:21:21.235Z",
+ "state": "reviewed"
+ },
+ {
+ "user_id": 16,
+ "created_at": "2020-01-09T11:21:21.235Z",
+ "state": "attention_requested"
+ },
+ {
+ "user_id": 6,
+ "created_at": "2020-01-10T11:21:21.235Z",
+ "state": "unreviewed"
+ }
+ ],
+ "merge_request_reviewers": [
+ {
+ "user_id": 1,
+ "created_at": "2020-01-07T11:21:21.235Z",
+ "state": "unreviewed"
+ },
+ {
+ "user_id": 15,
+ "created_at": "2020-01-08T11:21:21.235Z",
+ "state": "reviewed"
+ },
+ {
+ "user_id": 16,
+ "created_at": "2020-01-09T11:21:21.235Z",
+ "state": "attention_requested"
+ },
+ {
+ "user_id": 6,
+ "created_at": "2020-01-10T11:21:21.235Z",
+ "state": "unreviewed"
+ }
+ ],
+ "approvals": [
+ {
+ "user_id": 1,
+ "created_at": "2020-01-07T11:21:21.235Z",
+ "updated_at": "2020-01-08T11:21:21.235Z"
+ },
+ {
+ "user_id": 15,
+ "created_at": "2020-01-07T11:21:21.235Z",
+ "updated_at": "2020-01-08T11:21:21.235Z"
+ },
+ {
+ "user_id": 16,
+ "created_at": "2020-01-07T11:21:21.235Z",
+ "updated_at": "2020-01-08T11:21:21.235Z"
+ },
+ {
+ "user_id": 6,
+ "created_at": "2020-01-07T11:21:21.235Z",
+ "updated_at": "2020-01-08T11:21:21.235Z"
+ }
]
},
{
@@ -3416,7 +3482,10 @@
"action": 1,
"author_id": 1
}
- ]
+ ],
+ "merge_request_assignees": [],
+ "merge_request_reviewers": [],
+ "approvals": []
},
{
"id": 15,
diff --git a/spec/fixtures/lib/gitlab/import_export/complex/tree/project/merge_requests.ndjson b/spec/fixtures/lib/gitlab/import_export/complex/tree/project/merge_requests.ndjson
index 16e45509a1b..c14221adc1c 100644
--- a/spec/fixtures/lib/gitlab/import_export/complex/tree/project/merge_requests.ndjson
+++ b/spec/fixtures/lib/gitlab/import_export/complex/tree/project/merge_requests.ndjson
@@ -1,5 +1,5 @@
-{"id":27,"target_branch":"feature","source_branch":"feature_conflict","source_project_id":2147483547,"author_id":1,"assignee_id":null,"title":"MR1","created_at":"2016-06-14T15:02:36.568Z","updated_at":"2016-06-14T15:02:56.815Z","state":"opened","merge_status":"unchecked","target_project_id":5,"iid":9,"description":null,"position":0,"updated_by_id":null,"merge_error":null,"diff_head_sha":"HEAD","source_branch_sha":"ABCD","target_branch_sha":"DCBA","merge_params":{"force_remove_source_branch":null},"merge_when_pipeline_succeeds":true,"merge_user_id":null,"merge_commit_sha":null,"notes":[{"id":669,"note":"added 3 commits\n\n<ul><li>16ea4e20...074a2a32 - 2 commits from branch <code>master</code></li><li>ca223a02 - readme: fix typos</li></ul>\n\n[Compare with previous version](/group/project/merge_requests/1/diffs?diff_id=1189&start_sha=16ea4e207fb258fe4e9c73185a725207c9a4f3e1)","noteable_type":"MergeRequest","author_id":26,"created_at":"2020-03-28T12:47:33.461Z","updated_at":"2020-03-28T12:47:33.461Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"system":true,"st_diff":null,"updated_by_id":null,"position":null,"original_position":null,"resolved_at":null,"resolved_by_id":null,"discussion_id":null,"change_position":null,"resolved_by_push":null,"confidential":null,"type":null,"author":{"name":"User 4"},"award_emoji":[],"system_note_metadata":{"id":4789,"commit_count":3,"action":"commit","created_at":"2020-03-28T12:47:33.461Z","updated_at":"2020-03-28T12:47:33.461Z"},"events":[],"suggestions":[]},{"id":670,"note":"unmarked as a **Work In Progress**","noteable_type":"MergeRequest","author_id":26,"created_at":"2020-03-28T12:48:36.951Z","updated_at":"2020-03-28T12:48:36.951Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"system":true,"st_diff":null,"updated_by_id":null,"position":null,"original_position":null,"resolved_at":null,"resolved_by_id":null,"discussion_id":null,"change_position":null,"resolved_by_push":null,"confidential":null,"type":null,"author":{"name":"User 4"},"award_emoji":[],"system_note_metadata":{"id":4790,"commit_count":null,"action":"title","created_at":"2020-03-28T12:48:36.951Z","updated_at":"2020-03-28T12:48:36.951Z"},"events":[],"suggestions":[]},{"id":671,"note":"Sit voluptatibus eveniet architecto quidem.","note_html":"<p>something else entirely</p>","cached_markdown_version":917504,"noteable_type":"MergeRequest","author_id":26,"created_at":"2016-06-14T15:02:56.632Z","updated_at":"2016-06-14T15:02:56.632Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":27,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"User 4"},"events":[],"award_emoji":[{"id":1,"name":"tada","user_id":1,"awardable_type":"Note","awardable_id":1,"created_at":"2019-11-05T15:37:21.287Z","updated_at":"2019-11-05T15:37:21.287Z"}]},{"id":672,"note":"Odio maxime ratione voluptatibus sed.","noteable_type":"MergeRequest","author_id":25,"created_at":"2016-06-14T15:02:56.656Z","updated_at":"2016-06-14T15:02:56.656Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":27,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"User 3"},"events":[]},{"id":673,"note":"Et deserunt et omnis nihil excepturi 
accusantium.","noteable_type":"MergeRequest","author_id":22,"created_at":"2016-06-14T15:02:56.679Z","updated_at":"2016-06-14T15:02:56.679Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":27,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"User 0"},"events":[]},{"id":674,"note":"Saepe asperiores exercitationem non dignissimos laborum reiciendis et ipsum.","noteable_type":"MergeRequest","author_id":20,"created_at":"2016-06-14T15:02:56.700Z","updated_at":"2016-06-14T15:02:56.700Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":27,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Ottis Schuster II"},"events":[],"suggestions":[{"id":1,"note_id":674,"relative_order":0,"applied":false,"commit_id":null,"from_content":"Original line\n","to_content":"New line\n","lines_above":0,"lines_below":0,"outdated":false}]},{"id":675,"note":"Numquam est at dolor quo et sed eligendi similique.","noteable_type":"MergeRequest","author_id":16,"created_at":"2016-06-14T15:02:56.720Z","updated_at":"2016-06-14T15:02:56.720Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":27,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Rhett Emmerich IV"},"events":[]},{"id":676,"note":"Et perferendis aliquam sunt nisi labore delectus.","noteable_type":"MergeRequest","author_id":15,"created_at":"2016-06-14T15:02:56.742Z","updated_at":"2016-06-14T15:02:56.742Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":27,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Burdette Bernier"},"events":[]},{"id":677,"note":"Aut ex rerum et in.","noteable_type":"MergeRequest","author_id":6,"created_at":"2016-06-14T15:02:56.791Z","updated_at":"2016-06-14T15:02:56.791Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":27,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Ari Wintheiser"},"events":[]},{"id":678,"note":"Dolor laborum earum ut exercitationem.","noteable_type":"MergeRequest","author_id":1,"created_at":"2016-06-14T15:02:56.814Z","updated_at":"2016-06-14T15:02:56.814Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":27,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Administrator"},"events":[]}],"resource_label_events":[{"id":243,"action":"add","issue_id":null,"merge_request_id":27,"label_id":null,"user_id":1,"created_at":"2018-08-28T08:24:00.494Z"}],"merge_request_diff":{"id":27,"state":"collected","merge_request_diff_commits":[{"merge_request_diff_id":27,"relative_order":0,"sha":"bb5206fee213d983da88c47f9cf4cc6caf9c66dc","message":"Feature conflict added\n\nSigned-off-by: Dmitriy Zaporozhets <dmitriy.zaporozhets@gmail.com>\n","authored_date":"2014-08-06T08:35:52.000+02:00","committed_date":"2014-08-06T08:35:52.000+02:00","commit_author":{"name":"Dmitriy Zaporozhets","email":"dmitriy.zaporozhets@gmail.com"},"committer":{"name":"Dmitriy Zaporozhets","email":"dmitriy.zaporozhets@gmail.com"}},{"merge_request_diff_id":27,"relative_order":1,"sha":"5937ac0a7beb003549fc5fd26fc247adbce4a52e","message":"Add submodule from gitlab.com\n\nSigned-off-by: Dmitriy Zaporozhets <dmitriy.zaporozhets@gmail.com>\n","authored_date":"2014-02-27T10:01:38.000+01:00","committed_date":"2014-02-27T10:01:38.000+01:00","commit_author":{"name":"Dmitriy 
Zaporozhets","email":"dmitriy.zaporozhets@gmail.com"},"committer":{"name":"Dmitriy Zaporozhets","email":"dmitriy.zaporozhets@gmail.com"}},{"merge_request_diff_id":27,"relative_order":2,"sha":"570e7b2abdd848b95f2f578043fc23bd6f6fd24d","message":"Change some files\n\nSigned-off-by: Dmitriy Zaporozhets <dmitriy.zaporozhets@gmail.com>\n","authored_date":"2014-02-27T09:57:31.000+01:00","committed_date":"2014-02-27T09:57:31.000+01:00","commit_author":{"name":"Dmitriy Zaporozhets","email":"dmitriy.zaporozhets@gmail.com"},"committer":{"name":"Dmitriy Zaporozhets","email":"dmitriy.zaporozhets@gmail.com"}},{"merge_request_diff_id":27,"relative_order":3,"sha":"6f6d7e7ed97bb5f0054f2b1df789b39ca89b6ff9","message":"More submodules\n\nSigned-off-by: Dmitriy Zaporozhets <dmitriy.zaporozhets@gmail.com>\n","authored_date":"2014-02-27T09:54:21.000+01:00","committed_date":"2014-02-27T09:54:21.000+01:00","commit_author":{"name":"Dmitriy Zaporozhets","email":"dmitriy.zaporozhets@gmail.com"},"committer":{"name":"Dmitriy Zaporozhets","email":"dmitriy.zaporozhets@gmail.com"}},{"merge_request_diff_id":27,"relative_order":4,"sha":"d14d6c0abdd253381df51a723d58691b2ee1ab08","message":"Remove ds_store files\n\nSigned-off-by: Dmitriy Zaporozhets <dmitriy.zaporozhets@gmail.com>\n","authored_date":"2014-02-27T09:49:50.000+01:00","committed_date":"2014-02-27T09:49:50.000+01:00","commit_author":{"name":"Dmitriy Zaporozhets","email":"dmitriy.zaporozhets@gmail.com"},"committer":{"name":"Dmitriy Zaporozhets","email":"dmitriy.zaporozhets@gmail.com"}},{"merge_request_diff_id":27,"relative_order":5,"sha":"c1acaa58bbcbc3eafe538cb8274ba387047b69f8","message":"Ignore DS files\n\nSigned-off-by: Dmitriy Zaporozhets <dmitriy.zaporozhets@gmail.com>\n","authored_date":"2014-02-27T09:48:32.000+01:00","committed_date":"2014-02-27T09:48:32.000+01:00","commit_author":{"name":"Dmitriy Zaporozhets","email":"dmitriy.zaporozhets@gmail.com"},"committer":{"name":"Dmitriy Zaporozhets","email":"dmitriy.zaporozhets@gmail.com"}}],"merge_request_diff_files":[{"merge_request_diff_id":27,"relative_order":0,"utf8_diff":"Binary files a/.DS_Store and /dev/null differ\n","new_path":".DS_Store","old_path":".DS_Store","a_mode":"100644","b_mode":"0","new_file":false,"renamed_file":false,"deleted_file":true,"too_large":false},{"merge_request_diff_id":27,"relative_order":1,"utf8_diff":"--- a/.gitignore\n+++ b/.gitignore\n@@ -17,3 +17,4 @@ rerun.txt\n pickle-email-*.html\n .project\n config/initializers/secret_token.rb\n+.DS_Store\n","new_path":".gitignore","old_path":".gitignore","a_mode":"100644","b_mode":"100644","new_file":false,"renamed_file":false,"deleted_file":false,"too_large":false},{"merge_request_diff_id":27,"relative_order":2,"utf8_diff":"--- a/.gitmodules\n+++ b/.gitmodules\n@@ -1,3 +1,9 @@\n [submodule \"six\"]\n \tpath = six\n \turl = git://github.com/randx/six.git\n+[submodule \"gitlab-shell\"]\n+\tpath = gitlab-shell\n+\turl = https://github.com/gitlabhq/gitlab-shell.git\n+[submodule \"gitlab-grack\"]\n+\tpath = gitlab-grack\n+\turl = https://gitlab.com/gitlab-org/gitlab-grack.git\n","new_path":".gitmodules","old_path":".gitmodules","a_mode":"100644","b_mode":"100644","new_file":false,"renamed_file":false,"deleted_file":false,"too_large":false},{"merge_request_diff_id":27,"relative_order":3,"utf8_diff":"Binary files a/files/.DS_Store and /dev/null 
differ\n","new_path":"files/.DS_Store","old_path":"files/.DS_Store","a_mode":"100644","b_mode":"0","new_file":false,"renamed_file":false,"deleted_file":true,"too_large":false},{"merge_request_diff_id":27,"relative_order":4,"utf8_diff":"--- /dev/null\n+++ b/files/ruby/feature.rb\n@@ -0,0 +1,4 @@\n+# This file was changed in feature branch\n+# We put different code here to make merge conflict\n+class Conflict\n+end\n","new_path":"files/ruby/feature.rb","old_path":"files/ruby/feature.rb","a_mode":"0","b_mode":"100644","new_file":true,"renamed_file":false,"deleted_file":false,"too_large":false},{"merge_request_diff_id":27,"relative_order":5,"utf8_diff":"--- a/files/ruby/popen.rb\n+++ b/files/ruby/popen.rb\n@@ -6,12 +6,18 @@ module Popen\n \n def popen(cmd, path=nil)\n unless cmd.is_a?(Array)\n- raise \"System commands must be given as an array of strings\"\n+ raise RuntimeError, \"System commands must be given as an array of strings\"\n end\n \n path ||= Dir.pwd\n- vars = { \"PWD\" => path }\n- options = { chdir: path }\n+\n+ vars = {\n+ \"PWD\" => path\n+ }\n+\n+ options = {\n+ chdir: path\n+ }\n \n unless File.directory?(path)\n FileUtils.mkdir_p(path)\n@@ -19,6 +25,7 @@ module Popen\n \n @cmd_output = \"\"\n @cmd_status = 0\n+\n Open3.popen3(vars, *cmd, options) do |stdin, stdout, stderr, wait_thr|\n @cmd_output << stdout.read\n @cmd_output << stderr.read\n","new_path":"files/ruby/popen.rb","old_path":"files/ruby/popen.rb","a_mode":"100644","b_mode":"100644","new_file":false,"renamed_file":false,"deleted_file":false,"too_large":false},{"merge_request_diff_id":27,"relative_order":6,"utf8_diff":"--- a/files/ruby/regex.rb\n+++ b/files/ruby/regex.rb\n@@ -19,14 +19,12 @@ module Gitlab\n end\n \n def archive_formats_regex\n- #|zip|tar| tar.gz | tar.bz2 |\n- /(zip|tar|tar\\.gz|tgz|gz|tar\\.bz2|tbz|tbz2|tb2|bz2)/\n+ /(zip|tar|7z|tar\\.gz|tgz|gz|tar\\.bz2|tbz|tbz2|tb2|bz2)/\n end\n \n def git_reference_regex\n # Valid git ref regex, see:\n # https://www.kernel.org/pub/software/scm/git/docs/git-check-ref-format.html\n-\n %r{\n (?!\n (?# doesn't begins with)\n","new_path":"files/ruby/regex.rb","old_path":"files/ruby/regex.rb","a_mode":"100644","b_mode":"100644","new_file":false,"renamed_file":false,"deleted_file":false,"too_large":false},{"merge_request_diff_id":27,"relative_order":7,"utf8_diff":"--- /dev/null\n+++ b/gitlab-grack\n@@ -0,0 +1 @@\n+Subproject commit 645f6c4c82fd3f5e06f67134450a570b795e55a6\n","new_path":"gitlab-grack","old_path":"gitlab-grack","a_mode":"0","b_mode":"160000","new_file":true,"renamed_file":false,"deleted_file":false,"too_large":false},{"merge_request_diff_id":27,"relative_order":8,"utf8_diff":"--- /dev/null\n+++ b/gitlab-shell\n@@ -0,0 +1 @@\n+Subproject commit 
79bceae69cb5750d6567b223597999bfa91cb3b9\n","new_path":"gitlab-shell","old_path":"gitlab-shell","a_mode":"0","b_mode":"160000","new_file":true,"renamed_file":false,"deleted_file":false,"too_large":false}],"merge_request_id":27,"created_at":"2016-06-14T15:02:36.572Z","updated_at":"2016-06-14T15:02:36.658Z","base_commit_sha":"ae73cb07c9eeaf35924a10f713b364d32b2dd34f","real_size":"9"},"events":[{"id":221,"target_type":"MergeRequest","target_id":27,"project_id":36,"created_at":"2016-06-14T15:02:36.703Z","updated_at":"2016-06-14T15:02:36.703Z","action":1,"author_id":1},{"id":187,"target_type":"MergeRequest","target_id":27,"project_id":5,"created_at":"2016-06-14T15:02:36.703Z","updated_at":"2016-06-14T15:02:36.703Z","action":1,"author_id":1}],"approvals_before_merge":1,"award_emoji":[{"id":1,"name":"thumbsup","user_id":1,"awardable_type":"MergeRequest","awardable_id":27,"created_at":"2020-01-07T11:21:21.235Z","updated_at":"2020-01-07T11:21:21.235Z"},{"id":2,"name":"drum","user_id":1,"awardable_type":"MergeRequest","awardable_id":27,"created_at":"2020-01-07T11:21:21.235Z","updated_at":"2020-01-07T11:21:21.235Z"}]}
-{"id":26,"target_branch":"master","source_branch":"feature","source_project_id":4,"author_id":1,"assignee_id":null,"title":"MR2","created_at":"2016-06-14T15:02:36.418Z","updated_at":"2016-06-14T15:02:57.013Z","state":"opened","merge_status":"unchecked","target_project_id":5,"iid":8,"description":null,"position":0,"updated_by_id":null,"merge_error":null,"merge_params":{"force_remove_source_branch":null},"merge_when_pipeline_succeeds":false,"merge_user_id":null,"merge_commit_sha":null,"notes":[{"id":679,"note":"Qui rerum totam nisi est.","noteable_type":"MergeRequest","author_id":26,"created_at":"2016-06-14T15:02:56.848Z","updated_at":"2016-06-14T15:02:56.848Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":26,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"User 4"},"events":[]},{"id":680,"note":"Pariatur magni corrupti consequatur debitis minima error beatae voluptatem.","noteable_type":"MergeRequest","author_id":25,"created_at":"2016-06-14T15:02:56.871Z","updated_at":"2016-06-14T15:02:56.871Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":26,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"User 3"},"events":[]},{"id":681,"note":"Qui quis ut modi eos rerum ratione.","noteable_type":"MergeRequest","author_id":22,"created_at":"2016-06-14T15:02:56.895Z","updated_at":"2016-06-14T15:02:56.895Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":26,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"User 0"},"events":[]},{"id":682,"note":"Illum quidem expedita mollitia fugit.","noteable_type":"MergeRequest","author_id":20,"created_at":"2016-06-14T15:02:56.918Z","updated_at":"2016-06-14T15:02:56.918Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":26,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Ottis Schuster II"},"events":[]},{"id":683,"note":"Consectetur voluptate sit sint possimus veritatis quod.","noteable_type":"MergeRequest","author_id":16,"created_at":"2016-06-14T15:02:56.942Z","updated_at":"2016-06-14T15:02:56.942Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":26,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Rhett Emmerich IV"},"events":[]},{"id":684,"note":"Natus libero quibusdam rem assumenda deleniti accusamus sed earum.","noteable_type":"MergeRequest","author_id":15,"created_at":"2016-06-14T15:02:56.966Z","updated_at":"2016-06-14T15:02:56.966Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":26,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Burdette Bernier"},"events":[]},{"id":685,"note":"Tenetur autem nihil rerum odit.","noteable_type":"MergeRequest","author_id":6,"created_at":"2016-06-14T15:02:56.989Z","updated_at":"2016-06-14T15:02:56.989Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":26,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Ari Wintheiser"},"events":[]},{"id":686,"note":"Quia maiores et odio 
sed.","noteable_type":"MergeRequest","author_id":1,"created_at":"2016-06-14T15:02:57.012Z","updated_at":"2016-06-14T15:02:57.012Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":26,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Administrator"},"events":[]}],"merge_request_diff":{"id":26,"state":"collected","merge_request_diff_commits":[{"merge_request_diff_id":26,"sha":"0b4bc9a49b562e85de7cc9e834518ea6828729b9","relative_order":0,"message":"Feature added\n\nSigned-off-by: Dmitriy Zaporozhets <dmitriy.zaporozhets@gmail.com>\n","authored_date":"2014-02-27T09:26:01.000+01:00","committed_date":"2014-02-27T09:26:01.000+01:00","commit_author":{"name":"Dmitriy Zaporozhets","email":"dmitriy.zaporozhets@gmail.com"},"committer":{"name":"Dmitriy Zaporozhets","email":"dmitriy.zaporozhets@gmail.com"}}],"merge_request_diff_files":[{"merge_request_diff_id":26,"relative_order":0,"utf8_diff":"--- /dev/null\n+++ b/files/ruby/feature.rb\n@@ -0,0 +1,5 @@\n+class Feature\n+ def foo\n+ puts 'bar'\n+ end\n+end\n","new_path":"files/ruby/feature.rb","old_path":"files/ruby/feature.rb","a_mode":"0","b_mode":"100644","new_file":true,"renamed_file":false,"deleted_file":false,"too_large":false}],"merge_request_id":26,"created_at":"2016-06-14T15:02:36.421Z","updated_at":"2016-06-14T15:02:36.474Z","base_commit_sha":"ae73cb07c9eeaf35924a10f713b364d32b2dd34f","real_size":"1"},"events":[{"id":222,"target_type":"MergeRequest","target_id":26,"project_id":36,"created_at":"2016-06-14T15:02:36.496Z","updated_at":"2016-06-14T15:02:36.496Z","action":1,"author_id":1},{"id":186,"target_type":"MergeRequest","target_id":26,"project_id":5,"created_at":"2016-06-14T15:02:36.496Z","updated_at":"2016-06-14T15:02:36.496Z","action":1,"author_id":1}]}
+{"id":27,"target_branch":"feature","source_branch":"feature_conflict","source_project_id":2147483547,"author_id":1,"assignee_id":null,"title":"MR1","created_at":"2016-06-14T15:02:36.568Z","updated_at":"2016-06-14T15:02:56.815Z","state":"opened","merge_status":"unchecked","target_project_id":5,"iid":9,"description":null,"position":0,"updated_by_id":null,"merge_error":null,"diff_head_sha":"HEAD","source_branch_sha":"ABCD","target_branch_sha":"DCBA","merge_params":{"force_remove_source_branch":null},"merge_when_pipeline_succeeds":true,"merge_user_id":null,"merge_commit_sha":null,"notes":[{"id":669,"note":"added 3 commits\n\n<ul><li>16ea4e20...074a2a32 - 2 commits from branch <code>master</code></li><li>ca223a02 - readme: fix typos</li></ul>\n\n[Compare with previous version](/group/project/merge_requests/1/diffs?diff_id=1189&start_sha=16ea4e207fb258fe4e9c73185a725207c9a4f3e1)","noteable_type":"MergeRequest","author_id":26,"created_at":"2020-03-28T12:47:33.461Z","updated_at":"2020-03-28T12:47:33.461Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"system":true,"st_diff":null,"updated_by_id":null,"position":null,"original_position":null,"resolved_at":null,"resolved_by_id":null,"discussion_id":null,"change_position":null,"resolved_by_push":null,"confidential":null,"type":null,"author":{"name":"User 4"},"award_emoji":[],"system_note_metadata":{"id":4789,"commit_count":3,"action":"commit","created_at":"2020-03-28T12:47:33.461Z","updated_at":"2020-03-28T12:47:33.461Z"},"events":[],"suggestions":[]},{"id":670,"note":"unmarked as a **Work In Progress**","noteable_type":"MergeRequest","author_id":26,"created_at":"2020-03-28T12:48:36.951Z","updated_at":"2020-03-28T12:48:36.951Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"system":true,"st_diff":null,"updated_by_id":null,"position":null,"original_position":null,"resolved_at":null,"resolved_by_id":null,"discussion_id":null,"change_position":null,"resolved_by_push":null,"confidential":null,"type":null,"author":{"name":"User 4"},"award_emoji":[],"system_note_metadata":{"id":4790,"commit_count":null,"action":"title","created_at":"2020-03-28T12:48:36.951Z","updated_at":"2020-03-28T12:48:36.951Z"},"events":[],"suggestions":[]},{"id":671,"note":"Sit voluptatibus eveniet architecto quidem.","note_html":"<p>something else entirely</p>","cached_markdown_version":917504,"noteable_type":"MergeRequest","author_id":26,"created_at":"2016-06-14T15:02:56.632Z","updated_at":"2016-06-14T15:02:56.632Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":27,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"User 4"},"events":[],"award_emoji":[{"id":1,"name":"tada","user_id":1,"awardable_type":"Note","awardable_id":1,"created_at":"2019-11-05T15:37:21.287Z","updated_at":"2019-11-05T15:37:21.287Z"}]},{"id":672,"note":"Odio maxime ratione voluptatibus sed.","noteable_type":"MergeRequest","author_id":25,"created_at":"2016-06-14T15:02:56.656Z","updated_at":"2016-06-14T15:02:56.656Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":27,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"User 3"},"events":[]},{"id":673,"note":"Et deserunt et omnis nihil excepturi 
accusantium.","noteable_type":"MergeRequest","author_id":22,"created_at":"2016-06-14T15:02:56.679Z","updated_at":"2016-06-14T15:02:56.679Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":27,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"User 0"},"events":[]},{"id":674,"note":"Saepe asperiores exercitationem non dignissimos laborum reiciendis et ipsum.","noteable_type":"MergeRequest","author_id":20,"created_at":"2016-06-14T15:02:56.700Z","updated_at":"2016-06-14T15:02:56.700Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":27,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Ottis Schuster II"},"events":[],"suggestions":[{"id":1,"note_id":674,"relative_order":0,"applied":false,"commit_id":null,"from_content":"Original line\n","to_content":"New line\n","lines_above":0,"lines_below":0,"outdated":false}]},{"id":675,"note":"Numquam est at dolor quo et sed eligendi similique.","noteable_type":"MergeRequest","author_id":16,"created_at":"2016-06-14T15:02:56.720Z","updated_at":"2016-06-14T15:02:56.720Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":27,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Rhett Emmerich IV"},"events":[]},{"id":676,"note":"Et perferendis aliquam sunt nisi labore delectus.","noteable_type":"MergeRequest","author_id":15,"created_at":"2016-06-14T15:02:56.742Z","updated_at":"2016-06-14T15:02:56.742Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":27,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Burdette Bernier"},"events":[]},{"id":677,"note":"Aut ex rerum et in.","noteable_type":"MergeRequest","author_id":6,"created_at":"2016-06-14T15:02:56.791Z","updated_at":"2016-06-14T15:02:56.791Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":27,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Ari Wintheiser"},"events":[]},{"id":678,"note":"Dolor laborum earum ut exercitationem.","noteable_type":"MergeRequest","author_id":1,"created_at":"2016-06-14T15:02:56.814Z","updated_at":"2016-06-14T15:02:56.814Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":27,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Administrator"},"events":[]}],"resource_label_events":[{"id":243,"action":"add","issue_id":null,"merge_request_id":27,"label_id":null,"user_id":1,"created_at":"2018-08-28T08:24:00.494Z"}],"merge_request_diff":{"id":27,"state":"collected","merge_request_diff_commits":[{"merge_request_diff_id":27,"relative_order":0,"sha":"bb5206fee213d983da88c47f9cf4cc6caf9c66dc","message":"Feature conflict added\n\nSigned-off-by: Dmitriy Zaporozhets <dmitriy.zaporozhets@gmail.com>\n","authored_date":"2014-08-06T08:35:52.000+02:00","committed_date":"2014-08-06T08:35:52.000+02:00","commit_author":{"name":"Dmitriy Zaporozhets","email":"dmitriy.zaporozhets@gmail.com"},"committer":{"name":"Dmitriy Zaporozhets","email":"dmitriy.zaporozhets@gmail.com"}},{"merge_request_diff_id":27,"relative_order":1,"sha":"5937ac0a7beb003549fc5fd26fc247adbce4a52e","message":"Add submodule from gitlab.com\n\nSigned-off-by: Dmitriy Zaporozhets <dmitriy.zaporozhets@gmail.com>\n","authored_date":"2014-02-27T10:01:38.000+01:00","committed_date":"2014-02-27T10:01:38.000+01:00","commit_author":{"name":"Dmitriy 
Zaporozhets","email":"dmitriy.zaporozhets@gmail.com"},"committer":{"name":"Dmitriy Zaporozhets","email":"dmitriy.zaporozhets@gmail.com"}},{"merge_request_diff_id":27,"relative_order":2,"sha":"570e7b2abdd848b95f2f578043fc23bd6f6fd24d","message":"Change some files\n\nSigned-off-by: Dmitriy Zaporozhets <dmitriy.zaporozhets@gmail.com>\n","authored_date":"2014-02-27T09:57:31.000+01:00","committed_date":"2014-02-27T09:57:31.000+01:00","commit_author":{"name":"Dmitriy Zaporozhets","email":"dmitriy.zaporozhets@gmail.com"},"committer":{"name":"Dmitriy Zaporozhets","email":"dmitriy.zaporozhets@gmail.com"}},{"merge_request_diff_id":27,"relative_order":3,"sha":"6f6d7e7ed97bb5f0054f2b1df789b39ca89b6ff9","message":"More submodules\n\nSigned-off-by: Dmitriy Zaporozhets <dmitriy.zaporozhets@gmail.com>\n","authored_date":"2014-02-27T09:54:21.000+01:00","committed_date":"2014-02-27T09:54:21.000+01:00","commit_author":{"name":"Dmitriy Zaporozhets","email":"dmitriy.zaporozhets@gmail.com"},"committer":{"name":"Dmitriy Zaporozhets","email":"dmitriy.zaporozhets@gmail.com"}},{"merge_request_diff_id":27,"relative_order":4,"sha":"d14d6c0abdd253381df51a723d58691b2ee1ab08","message":"Remove ds_store files\n\nSigned-off-by: Dmitriy Zaporozhets <dmitriy.zaporozhets@gmail.com>\n","authored_date":"2014-02-27T09:49:50.000+01:00","committed_date":"2014-02-27T09:49:50.000+01:00","commit_author":{"name":"Dmitriy Zaporozhets","email":"dmitriy.zaporozhets@gmail.com"},"committer":{"name":"Dmitriy Zaporozhets","email":"dmitriy.zaporozhets@gmail.com"}},{"merge_request_diff_id":27,"relative_order":5,"sha":"c1acaa58bbcbc3eafe538cb8274ba387047b69f8","message":"Ignore DS files\n\nSigned-off-by: Dmitriy Zaporozhets <dmitriy.zaporozhets@gmail.com>\n","authored_date":"2014-02-27T09:48:32.000+01:00","committed_date":"2014-02-27T09:48:32.000+01:00","commit_author":{"name":"Dmitriy Zaporozhets","email":"dmitriy.zaporozhets@gmail.com"},"committer":{"name":"Dmitriy Zaporozhets","email":"dmitriy.zaporozhets@gmail.com"}}],"merge_request_diff_files":[{"merge_request_diff_id":27,"relative_order":0,"utf8_diff":"Binary files a/.DS_Store and /dev/null differ\n","new_path":".DS_Store","old_path":".DS_Store","a_mode":"100644","b_mode":"0","new_file":false,"renamed_file":false,"deleted_file":true,"too_large":false},{"merge_request_diff_id":27,"relative_order":1,"utf8_diff":"--- a/.gitignore\n+++ b/.gitignore\n@@ -17,3 +17,4 @@ rerun.txt\n pickle-email-*.html\n .project\n config/initializers/secret_token.rb\n+.DS_Store\n","new_path":".gitignore","old_path":".gitignore","a_mode":"100644","b_mode":"100644","new_file":false,"renamed_file":false,"deleted_file":false,"too_large":false},{"merge_request_diff_id":27,"relative_order":2,"utf8_diff":"--- a/.gitmodules\n+++ b/.gitmodules\n@@ -1,3 +1,9 @@\n [submodule \"six\"]\n \tpath = six\n \turl = git://github.com/randx/six.git\n+[submodule \"gitlab-shell\"]\n+\tpath = gitlab-shell\n+\turl = https://github.com/gitlabhq/gitlab-shell.git\n+[submodule \"gitlab-grack\"]\n+\tpath = gitlab-grack\n+\turl = https://gitlab.com/gitlab-org/gitlab-grack.git\n","new_path":".gitmodules","old_path":".gitmodules","a_mode":"100644","b_mode":"100644","new_file":false,"renamed_file":false,"deleted_file":false,"too_large":false},{"merge_request_diff_id":27,"relative_order":3,"utf8_diff":"Binary files a/files/.DS_Store and /dev/null 
differ\n","new_path":"files/.DS_Store","old_path":"files/.DS_Store","a_mode":"100644","b_mode":"0","new_file":false,"renamed_file":false,"deleted_file":true,"too_large":false},{"merge_request_diff_id":27,"relative_order":4,"utf8_diff":"--- /dev/null\n+++ b/files/ruby/feature.rb\n@@ -0,0 +1,4 @@\n+# This file was changed in feature branch\n+# We put different code here to make merge conflict\n+class Conflict\n+end\n","new_path":"files/ruby/feature.rb","old_path":"files/ruby/feature.rb","a_mode":"0","b_mode":"100644","new_file":true,"renamed_file":false,"deleted_file":false,"too_large":false},{"merge_request_diff_id":27,"relative_order":5,"utf8_diff":"--- a/files/ruby/popen.rb\n+++ b/files/ruby/popen.rb\n@@ -6,12 +6,18 @@ module Popen\n \n def popen(cmd, path=nil)\n unless cmd.is_a?(Array)\n- raise \"System commands must be given as an array of strings\"\n+ raise RuntimeError, \"System commands must be given as an array of strings\"\n end\n \n path ||= Dir.pwd\n- vars = { \"PWD\" => path }\n- options = { chdir: path }\n+\n+ vars = {\n+ \"PWD\" => path\n+ }\n+\n+ options = {\n+ chdir: path\n+ }\n \n unless File.directory?(path)\n FileUtils.mkdir_p(path)\n@@ -19,6 +25,7 @@ module Popen\n \n @cmd_output = \"\"\n @cmd_status = 0\n+\n Open3.popen3(vars, *cmd, options) do |stdin, stdout, stderr, wait_thr|\n @cmd_output << stdout.read\n @cmd_output << stderr.read\n","new_path":"files/ruby/popen.rb","old_path":"files/ruby/popen.rb","a_mode":"100644","b_mode":"100644","new_file":false,"renamed_file":false,"deleted_file":false,"too_large":false},{"merge_request_diff_id":27,"relative_order":6,"utf8_diff":"--- a/files/ruby/regex.rb\n+++ b/files/ruby/regex.rb\n@@ -19,14 +19,12 @@ module Gitlab\n end\n \n def archive_formats_regex\n- #|zip|tar| tar.gz | tar.bz2 |\n- /(zip|tar|tar\\.gz|tgz|gz|tar\\.bz2|tbz|tbz2|tb2|bz2)/\n+ /(zip|tar|7z|tar\\.gz|tgz|gz|tar\\.bz2|tbz|tbz2|tb2|bz2)/\n end\n \n def git_reference_regex\n # Valid git ref regex, see:\n # https://www.kernel.org/pub/software/scm/git/docs/git-check-ref-format.html\n-\n %r{\n (?!\n (?# doesn't begins with)\n","new_path":"files/ruby/regex.rb","old_path":"files/ruby/regex.rb","a_mode":"100644","b_mode":"100644","new_file":false,"renamed_file":false,"deleted_file":false,"too_large":false},{"merge_request_diff_id":27,"relative_order":7,"utf8_diff":"--- /dev/null\n+++ b/gitlab-grack\n@@ -0,0 +1 @@\n+Subproject commit 645f6c4c82fd3f5e06f67134450a570b795e55a6\n","new_path":"gitlab-grack","old_path":"gitlab-grack","a_mode":"0","b_mode":"160000","new_file":true,"renamed_file":false,"deleted_file":false,"too_large":false},{"merge_request_diff_id":27,"relative_order":8,"utf8_diff":"--- /dev/null\n+++ b/gitlab-shell\n@@ -0,0 +1 @@\n+Subproject commit 
79bceae69cb5750d6567b223597999bfa91cb3b9\n","new_path":"gitlab-shell","old_path":"gitlab-shell","a_mode":"0","b_mode":"160000","new_file":true,"renamed_file":false,"deleted_file":false,"too_large":false}],"merge_request_id":27,"created_at":"2016-06-14T15:02:36.572Z","updated_at":"2016-06-14T15:02:36.658Z","base_commit_sha":"ae73cb07c9eeaf35924a10f713b364d32b2dd34f","real_size":"9"},"events":[{"id":221,"target_type":"MergeRequest","target_id":27,"project_id":36,"created_at":"2016-06-14T15:02:36.703Z","updated_at":"2016-06-14T15:02:36.703Z","action":1,"author_id":1},{"id":187,"target_type":"MergeRequest","target_id":27,"project_id":5,"created_at":"2016-06-14T15:02:36.703Z","updated_at":"2016-06-14T15:02:36.703Z","action":1,"author_id":1}],"approvals_before_merge":1,"award_emoji":[{"id":1,"name":"thumbsup","user_id":1,"awardable_type":"MergeRequest","awardable_id":27,"created_at":"2020-01-07T11:21:21.235Z","updated_at":"2020-01-07T11:21:21.235Z"},{"id":2,"name":"drum","user_id":1,"awardable_type":"MergeRequest","awardable_id":27,"created_at":"2020-01-07T11:21:21.235Z","updated_at":"2020-01-07T11:21:21.235Z"}],"merge_request_assignees":[{"user_id":1,"created_at":"2020-01-07T11:21:21.235Z","state":"unreviewed"},{"user_id":15,"created_at":"2020-01-08T11:21:21.235Z","state":"reviewed"},{"user_id":16,"created_at":"2020-01-09T11:21:21.235Z","state":"attention_requested"},{"user_id":6,"created_at":"2020-01-10T11:21:21.235Z","state":"unreviewed"}],"merge_request_reviewers":[{"user_id":1,"created_at":"2020-01-07T11:21:21.235Z","state":"unreviewed"},{"user_id":15,"created_at":"2020-01-08T11:21:21.235Z","state":"reviewed"},{"user_id":16,"created_at":"2020-01-09T11:21:21.235Z","state":"attention_requested"},{"user_id":6,"created_at":"2020-01-10T11:21:21.235Z","state":"unreviewed"}],"approvals":[{"user_id":1,"created_at":"2020-01-07T11:21:21.235Z","updated_at":"2020-01-08T11:21:21.235Z"},{"user_id":15,"created_at":"2020-01-07T11:21:21.235Z","updated_at":"2020-01-08T11:21:21.235Z"},{"user_id":16,"created_at":"2020-01-07T11:21:21.235Z","updated_at":"2020-01-08T11:21:21.235Z"},{"user_id":6,"created_at":"2020-01-07T11:21:21.235Z","updated_at":"2020-01-08T11:21:21.235Z"}]}
+{"id":26,"target_branch":"master","source_branch":"feature","source_project_id":4,"author_id":1,"assignee_id":null,"title":"MR2","created_at":"2016-06-14T15:02:36.418Z","updated_at":"2016-06-14T15:02:57.013Z","state":"opened","merge_status":"unchecked","target_project_id":5,"iid":8,"description":null,"position":0,"updated_by_id":null,"merge_error":null,"merge_params":{"force_remove_source_branch":null},"merge_when_pipeline_succeeds":false,"merge_user_id":null,"merge_commit_sha":null,"notes":[{"id":679,"note":"Qui rerum totam nisi est.","noteable_type":"MergeRequest","author_id":26,"created_at":"2016-06-14T15:02:56.848Z","updated_at":"2016-06-14T15:02:56.848Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":26,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"User 4"},"events":[]},{"id":680,"note":"Pariatur magni corrupti consequatur debitis minima error beatae voluptatem.","noteable_type":"MergeRequest","author_id":25,"created_at":"2016-06-14T15:02:56.871Z","updated_at":"2016-06-14T15:02:56.871Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":26,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"User 3"},"events":[]},{"id":681,"note":"Qui quis ut modi eos rerum ratione.","noteable_type":"MergeRequest","author_id":22,"created_at":"2016-06-14T15:02:56.895Z","updated_at":"2016-06-14T15:02:56.895Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":26,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"User 0"},"events":[]},{"id":682,"note":"Illum quidem expedita mollitia fugit.","noteable_type":"MergeRequest","author_id":20,"created_at":"2016-06-14T15:02:56.918Z","updated_at":"2016-06-14T15:02:56.918Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":26,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Ottis Schuster II"},"events":[]},{"id":683,"note":"Consectetur voluptate sit sint possimus veritatis quod.","noteable_type":"MergeRequest","author_id":16,"created_at":"2016-06-14T15:02:56.942Z","updated_at":"2016-06-14T15:02:56.942Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":26,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Rhett Emmerich IV"},"events":[]},{"id":684,"note":"Natus libero quibusdam rem assumenda deleniti accusamus sed earum.","noteable_type":"MergeRequest","author_id":15,"created_at":"2016-06-14T15:02:56.966Z","updated_at":"2016-06-14T15:02:56.966Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":26,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Burdette Bernier"},"events":[]},{"id":685,"note":"Tenetur autem nihil rerum odit.","noteable_type":"MergeRequest","author_id":6,"created_at":"2016-06-14T15:02:56.989Z","updated_at":"2016-06-14T15:02:56.989Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":26,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Ari Wintheiser"},"events":[]},{"id":686,"note":"Quia maiores et odio 
sed.","noteable_type":"MergeRequest","author_id":1,"created_at":"2016-06-14T15:02:57.012Z","updated_at":"2016-06-14T15:02:57.012Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":26,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Administrator"},"events":[]}],"merge_request_diff":{"id":26,"state":"collected","merge_request_diff_commits":[{"merge_request_diff_id":26,"sha":"0b4bc9a49b562e85de7cc9e834518ea6828729b9","relative_order":0,"message":"Feature added\n\nSigned-off-by: Dmitriy Zaporozhets <dmitriy.zaporozhets@gmail.com>\n","authored_date":"2014-02-27T09:26:01.000+01:00","committed_date":"2014-02-27T09:26:01.000+01:00","commit_author":{"name":"Dmitriy Zaporozhets","email":"dmitriy.zaporozhets@gmail.com"},"committer":{"name":"Dmitriy Zaporozhets","email":"dmitriy.zaporozhets@gmail.com"}}],"merge_request_diff_files":[{"merge_request_diff_id":26,"relative_order":0,"utf8_diff":"--- /dev/null\n+++ b/files/ruby/feature.rb\n@@ -0,0 +1,5 @@\n+class Feature\n+ def foo\n+ puts 'bar'\n+ end\n+end\n","new_path":"files/ruby/feature.rb","old_path":"files/ruby/feature.rb","a_mode":"0","b_mode":"100644","new_file":true,"renamed_file":false,"deleted_file":false,"too_large":false}],"merge_request_id":26,"created_at":"2016-06-14T15:02:36.421Z","updated_at":"2016-06-14T15:02:36.474Z","base_commit_sha":"ae73cb07c9eeaf35924a10f713b364d32b2dd34f","real_size":"1"},"events":[{"id":222,"target_type":"MergeRequest","target_id":26,"project_id":36,"created_at":"2016-06-14T15:02:36.496Z","updated_at":"2016-06-14T15:02:36.496Z","action":1,"author_id":1},{"id":186,"target_type":"MergeRequest","target_id":26,"project_id":5,"created_at":"2016-06-14T15:02:36.496Z","updated_at":"2016-06-14T15:02:36.496Z","action":1,"author_id":1}],"merge_request_assignees":[],"merge_request_reviewers":[],"approvals":[]}
{"id":15,"target_branch":"test-7","source_branch":"test-1","source_project_id":5,"author_id":22,"assignee_id":16,"title":"Qui accusantium et inventore facilis doloribus occaecati officiis.","created_at":"2016-06-14T15:02:25.168Z","updated_at":"2016-06-14T15:02:59.521Z","state":"opened","merge_status":"unchecked","target_project_id":5,"iid":7,"description":"Et commodi deserunt aspernatur vero rerum. Ut non dolorum alias in odit est libero. Voluptatibus eos in et vitae repudiandae facilis ex mollitia.","position":0,"updated_by_id":null,"merge_error":null,"merge_params":{"force_remove_source_branch":null},"merge_when_pipeline_succeeds":false,"merge_user_id":null,"merge_commit_sha":null,"notes":[{"id":777,"note":"Pariatur voluptas placeat aspernatur culpa suscipit soluta.","noteable_type":"MergeRequest","author_id":26,"created_at":"2016-06-14T15:02:59.348Z","updated_at":"2016-06-14T15:02:59.348Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":15,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"User 4"},"events":[]},{"id":778,"note":"Alias et iure mollitia suscipit molestiae voluptatum nostrum asperiores.","noteable_type":"MergeRequest","author_id":25,"created_at":"2016-06-14T15:02:59.372Z","updated_at":"2016-06-14T15:02:59.372Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":15,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"User 3"},"events":[]},{"id":779,"note":"Laudantium qui eum qui sunt.","noteable_type":"MergeRequest","author_id":22,"created_at":"2016-06-14T15:02:59.395Z","updated_at":"2016-06-14T15:02:59.395Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":15,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"User 0"},"events":[]},{"id":780,"note":"Quas rem est iusto ut delectus fugiat recusandae mollitia.","noteable_type":"MergeRequest","author_id":20,"created_at":"2016-06-14T15:02:59.418Z","updated_at":"2016-06-14T15:02:59.418Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":15,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Ottis Schuster II"},"events":[]},{"id":781,"note":"Repellendus ab et qui nesciunt.","noteable_type":"MergeRequest","author_id":16,"created_at":"2016-06-14T15:02:59.444Z","updated_at":"2016-06-14T15:02:59.444Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":15,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Rhett Emmerich IV"},"events":[]},{"id":782,"note":"Non possimus voluptatum odio qui ut.","noteable_type":"MergeRequest","author_id":15,"created_at":"2016-06-14T15:02:59.469Z","updated_at":"2016-06-14T15:02:59.469Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":15,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Burdette Bernier"},"events":[]},{"id":783,"note":"Dolores repellendus eum ducimus quam ab dolorem quia.","noteable_type":"MergeRequest","author_id":6,"created_at":"2016-06-14T15:02:59.494Z","updated_at":"2016-06-14T15:02:59.494Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":15,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Ari Wintheiser"},"events":[]},{"id":784,"note":"Facilis dolorem aut corrupti id ratione 
occaecati.","noteable_type":"MergeRequest","author_id":1,"created_at":"2016-06-14T15:02:59.520Z","updated_at":"2016-06-14T15:02:59.520Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":15,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Administrator"},"events":[]}],"merge_request_diff":{"id":15,"state":"collected","merge_request_diff_commits":[{"merge_request_diff_id":15,"relative_order":0,"sha":"94b8d581c48d894b86661718582fecbc5e3ed2eb","message":"fixes #10\n","authored_date":"2016-01-19T13:22:56.000+01:00","committed_date":"2016-01-19T13:22:56.000+01:00","commit_author":{"name":"James Lopez","email":"james@jameslopez.es"},"committer":{"name":"James Lopez","email":"james@jameslopez.es"}}],"merge_request_diff_files":[{"merge_request_diff_id":15,"relative_order":0,"utf8_diff":"--- /dev/null\n+++ b/test\n","new_path":"test","old_path":"test","a_mode":"0","b_mode":"100644","new_file":true,"renamed_file":false,"deleted_file":false,"too_large":false}],"merge_request_id":15,"created_at":"2016-06-14T15:02:25.171Z","updated_at":"2016-06-14T15:02:25.230Z","base_commit_sha":"be93687618e4b132087f430a4d8fc3a609c9b77c","real_size":"1"},"events":[{"id":223,"target_type":"MergeRequest","target_id":15,"project_id":36,"created_at":"2016-06-14T15:02:25.262Z","updated_at":"2016-06-14T15:02:25.262Z","action":1,"author_id":1},{"id":175,"target_type":"MergeRequest","target_id":15,"project_id":5,"created_at":"2016-06-14T15:02:25.262Z","updated_at":"2016-06-14T15:02:25.262Z","action":1,"author_id":22}]}
{"id":14,"target_branch":"fix","source_branch":"test-3","source_project_id":5,"author_id":20,"assignee_id":20,"title":"In voluptas aut sequi voluptatem ullam vel corporis illum consequatur.","created_at":"2016-06-14T15:02:24.760Z","updated_at":"2016-06-14T15:02:59.749Z","state":"opened","merge_status":"unchecked","target_project_id":5,"iid":6,"description":"Dicta magnam non voluptates nam dignissimos nostrum deserunt. Dolorum et suscipit iure quae doloremque. Necessitatibus saepe aut labore sed.","position":0,"updated_by_id":null,"merge_error":null,"merge_params":{"force_remove_source_branch":null},"merge_when_pipeline_succeeds":false,"merge_user_id":null,"merge_commit_sha":null,"notes":[{"id":785,"note":"Atque cupiditate necessitatibus deserunt minus natus odit.","noteable_type":"MergeRequest","author_id":26,"created_at":"2016-06-14T15:02:59.559Z","updated_at":"2016-06-14T15:02:59.559Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":14,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"User 4"},"events":[]},{"id":786,"note":"Non dolorem provident mollitia nesciunt optio ex eveniet.","noteable_type":"MergeRequest","author_id":25,"created_at":"2016-06-14T15:02:59.587Z","updated_at":"2016-06-14T15:02:59.587Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":14,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"User 3"},"events":[]},{"id":787,"note":"Similique officia nemo quasi commodi accusantium quae qui.","noteable_type":"MergeRequest","author_id":22,"created_at":"2016-06-14T15:02:59.621Z","updated_at":"2016-06-14T15:02:59.621Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":14,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"User 0"},"events":[]},{"id":788,"note":"Et est et alias ad dolor qui.","noteable_type":"MergeRequest","author_id":20,"created_at":"2016-06-14T15:02:59.650Z","updated_at":"2016-06-14T15:02:59.650Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":14,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Ottis Schuster II"},"events":[]},{"id":789,"note":"Numquam temporibus ratione voluptatibus aliquid.","noteable_type":"MergeRequest","author_id":16,"created_at":"2016-06-14T15:02:59.675Z","updated_at":"2016-06-14T15:02:59.675Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":14,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Rhett Emmerich IV"},"events":[]},{"id":790,"note":"Ut ex aliquam consectetur perferendis est hic aut quia.","noteable_type":"MergeRequest","author_id":15,"created_at":"2016-06-14T15:02:59.703Z","updated_at":"2016-06-14T15:02:59.703Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":14,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Burdette Bernier"},"events":[]},{"id":791,"note":"Esse eos quam quaerat aut ut asperiores officiis.","noteable_type":"MergeRequest","author_id":6,"created_at":"2016-06-14T15:02:59.726Z","updated_at":"2016-06-14T15:02:59.726Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":14,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Ari Wintheiser"},"events":[]},{"id":792,"note":"Sint facilis accusantium iure 
blanditiis.","noteable_type":"MergeRequest","author_id":1,"created_at":"2016-06-14T15:02:59.748Z","updated_at":"2016-06-14T15:02:59.748Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":14,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Administrator"},"events":[]}],"merge_request_diff":{"id":14,"state":"collected","merge_request_diff_commits":[{"merge_request_diff_id":14,"relative_order":0,"sha":"ddd4ff416a931589c695eb4f5b23f844426f6928","message":"fixes #10\n","authored_date":"2016-01-19T14:14:43.000+01:00","committed_date":"2016-01-19T14:14:43.000+01:00","commit_author":{"name":"James Lopez","email":"james@jameslopez.es"},"committer":{"name":"James Lopez","email":"james@jameslopez.es"}},{"merge_request_diff_id":14,"relative_order":1,"sha":"be93687618e4b132087f430a4d8fc3a609c9b77c","message":"Merge branch 'master' into 'master'\r\n\r\nLFS object pointer.\r\n\r\n\r\n\r\nSee merge request !6","authored_date":"2015-12-07T12:52:12.000+01:00","committed_date":"2015-12-07T12:52:12.000+01:00","commit_author":{"name":"Marin Jankovski","email":"marin@gitlab.com"},"committer":{"name":"Marin Jankovski","email":"marin@gitlab.com"}},{"merge_request_diff_id":14,"relative_order":2,"sha":"048721d90c449b244b7b4c53a9186b04330174ec","message":"LFS object pointer.\n","authored_date":"2015-12-07T11:54:28.000+01:00","committed_date":"2015-12-07T11:54:28.000+01:00","commit_author":{"name":"Marin Jankovski","email":"maxlazio@gmail.com"},"committer":{"name":"Marin Jankovski","email":"maxlazio@gmail.com"}},{"merge_request_diff_id":14,"relative_order":3,"sha":"5f923865dde3436854e9ceb9cdb7815618d4e849","message":"GitLab currently doesn't support patches that involve a merge commit: add a commit here\n","authored_date":"2015-11-13T16:27:12.000+01:00","committed_date":"2015-11-13T16:27:12.000+01:00","commit_author":{"name":"Stan Hu","email":"stanhu@gmail.com"},"committer":{"name":"Stan Hu","email":"stanhu@gmail.com"}},{"merge_request_diff_id":14,"relative_order":4,"sha":"d2d430676773caa88cdaf7c55944073b2fd5561a","message":"Merge branch 'add-svg' into 'master'\r\n\r\nAdd GitLab SVG\r\n\r\nAdded to test preview of sanitized SVG images\r\n\r\nSee merge request !5","authored_date":"2015-11-13T08:50:17.000+01:00","committed_date":"2015-11-13T08:50:17.000+01:00","commit_author":{"name":"Stan Hu","email":"stanhu@gmail.com"},"committer":{"name":"Stan Hu","email":"stanhu@gmail.com"}},{"merge_request_diff_id":14,"relative_order":5,"sha":"2ea1f3dec713d940208fb5ce4a38765ecb5d3f73","message":"Add GitLab SVG\n","authored_date":"2015-11-13T08:39:43.000+01:00","committed_date":"2015-11-13T08:39:43.000+01:00","commit_author":{"name":"Stan Hu","email":"stanhu@gmail.com"},"committer":{"name":"Stan Hu","email":"stanhu@gmail.com"}},{"merge_request_diff_id":14,"relative_order":6,"sha":"59e29889be61e6e0e5e223bfa9ac2721d31605b8","message":"Merge branch 'whitespace' into 'master'\r\n\r\nadd whitespace test file\r\n\r\nSorry, I did a mistake.\r\nGit ignore empty files.\r\nSo I add a new whitespace test file.\r\n\r\nSee merge request !4","authored_date":"2015-11-13T07:21:40.000+01:00","committed_date":"2015-11-13T07:21:40.000+01:00","commit_author":{"name":"Stan Hu","email":"stanhu@gmail.com"},"committer":{"name":"Stan Hu","email":"stanhu@gmail.com"}},{"merge_request_diff_id":14,"relative_order":7,"sha":"66eceea0db202bb39c4e445e8ca28689645366c5","message":"add spaces in whitespace 
file\n","authored_date":"2015-11-13T06:01:27.000+01:00","committed_date":"2015-11-13T06:01:27.000+01:00","commit_author":{"name":"윤민식","email":"minsik.yoon@samsung.com"},"committer":{"name":"윤민식","email":"minsik.yoon@samsung.com"}},{"merge_request_diff_id":14,"relative_order":8,"sha":"08f22f255f082689c0d7d39d19205085311542bc","message":"remove empty file.(beacase git ignore empty file)\nadd whitespace test file.\n","authored_date":"2015-11-13T06:00:16.000+01:00","committed_date":"2015-11-13T06:00:16.000+01:00","commit_author":{"name":"윤민식","email":"minsik.yoon@samsung.com"},"committer":{"name":"윤민식","email":"minsik.yoon@samsung.com"}},{"merge_request_diff_id":14,"relative_order":9,"sha":"19e2e9b4ef76b422ce1154af39a91323ccc57434","message":"Merge branch 'whitespace' into 'master'\r\n\r\nadd spaces\r\n\r\nTo test this pull request.(https://github.com/gitlabhq/gitlabhq/pull/9757)\r\nJust add whitespaces.\r\n\r\nSee merge request !3","authored_date":"2015-11-13T05:23:14.000+01:00","committed_date":"2015-11-13T05:23:14.000+01:00","commit_author":{"name":"Stan Hu","email":"stanhu@gmail.com"},"committer":{"name":"Stan Hu","email":"stanhu@gmail.com"}},{"merge_request_diff_id":14,"relative_order":10,"sha":"c642fe9b8b9f28f9225d7ea953fe14e74748d53b","message":"add whitespace in empty\n","authored_date":"2015-11-13T05:08:45.000+01:00","committed_date":"2015-11-13T05:08:45.000+01:00","commit_author":{"name":"윤민식","email":"minsik.yoon@samsung.com"},"committer":{"name":"윤민식","email":"minsik.yoon@samsung.com"}},{"merge_request_diff_id":14,"relative_order":11,"sha":"9a944d90955aaf45f6d0c88f30e27f8d2c41cec0","message":"add empty file\n","authored_date":"2015-11-13T05:08:04.000+01:00","committed_date":"2015-11-13T05:08:04.000+01:00","commit_author":{"name":"윤민식","email":"minsik.yoon@samsung.com"},"committer":{"name":"윤민식","email":"minsik.yoon@samsung.com"}},{"merge_request_diff_id":14,"relative_order":12,"sha":"c7fbe50c7c7419d9701eebe64b1fdacc3df5b9dd","message":"Add ISO-8859 test file\n","authored_date":"2015-08-25T17:53:12.000+02:00","committed_date":"2015-08-25T17:53:12.000+02:00","commit_author":{"name":"Stan Hu","email":"stanhu@packetzoom.com"},"committer":{"name":"Stan Hu","email":"stanhu@packetzoom.com"}},{"merge_request_diff_id":14,"relative_order":13,"sha":"e56497bb5f03a90a51293fc6d516788730953899","message":"Merge branch 'tree_helper_spec' into 'master'\n\nAdd directory structure for tree_helper spec\n\nThis directory structure is needed for a testing the method flatten_tree(tree) in the TreeHelper module\n\nSee [merge request #275](https://gitlab.com/gitlab-org/gitlab-foss/merge_requests/275#note_732774)\n\nSee merge request !2\n","authored_date":"2015-01-10T22:23:29.000+01:00","committed_date":"2015-01-10T22:23:29.000+01:00","commit_author":{"name":"Sytse Sijbrandij","email":"sytse@gitlab.com"},"committer":{"name":"Sytse Sijbrandij","email":"sytse@gitlab.com"}},{"merge_request_diff_id":14,"relative_order":14,"sha":"4cd80ccab63c82b4bad16faa5193fbd2aa06df40","message":"add directory structure for tree_helper spec\n","authored_date":"2015-01-10T21:28:18.000+01:00","committed_date":"2015-01-10T21:28:18.000+01:00","commit_author":{"name":"marmis85","email":"marmis85@gmail.com"},"committer":{"name":"marmis85","email":"marmis85@gmail.com"}},{"merge_request_diff_id":14,"relative_order":15,"sha":"5937ac0a7beb003549fc5fd26fc247adbce4a52e","message":"Add submodule from gitlab.com\n\nSigned-off-by: Dmitriy Zaporozhets 
<dmitriy.zaporozhets@gmail.com>\n","authored_date":"2014-02-27T10:01:38.000+01:00","committed_date":"2014-02-27T10:01:38.000+01:00","commit_author":{"name":"Dmitriy Zaporozhets","email":"dmitriy.zaporozhets@gmail.com"},"committer":{"name":"Dmitriy Zaporozhets","email":"dmitriy.zaporozhets@gmail.com"}},{"merge_request_diff_id":14,"relative_order":16,"sha":"570e7b2abdd848b95f2f578043fc23bd6f6fd24d","message":"Change some files\n\nSigned-off-by: Dmitriy Zaporozhets <dmitriy.zaporozhets@gmail.com>\n","authored_date":"2014-02-27T09:57:31.000+01:00","committed_date":"2014-02-27T09:57:31.000+01:00","commit_author":{"name":"Dmitriy Zaporozhets","email":"dmitriy.zaporozhets@gmail.com"},"committer":{"name":"Dmitriy Zaporozhets","email":"dmitriy.zaporozhets@gmail.com"}},{"merge_request_diff_id":14,"relative_order":17,"sha":"6f6d7e7ed97bb5f0054f2b1df789b39ca89b6ff9","message":"More submodules\n\nSigned-off-by: Dmitriy Zaporozhets <dmitriy.zaporozhets@gmail.com>\n","authored_date":"2014-02-27T09:54:21.000+01:00","committed_date":"2014-02-27T09:54:21.000+01:00","commit_author":{"name":"Dmitriy Zaporozhets","email":"dmitriy.zaporozhets@gmail.com"},"committer":{"name":"Dmitriy Zaporozhets","email":"dmitriy.zaporozhets@gmail.com"}},{"merge_request_diff_id":14,"relative_order":18,"sha":"d14d6c0abdd253381df51a723d58691b2ee1ab08","message":"Remove ds_store files\n\nSigned-off-by: Dmitriy Zaporozhets <dmitriy.zaporozhets@gmail.com>\n","authored_date":"2014-02-27T09:49:50.000+01:00","committed_date":"2014-02-27T09:49:50.000+01:00","commit_author":{"name":"Dmitriy Zaporozhets","email":"dmitriy.zaporozhets@gmail.com"},"committer":{"name":"Dmitriy Zaporozhets","email":"dmitriy.zaporozhets@gmail.com"}},{"merge_request_diff_id":14,"relative_order":19,"sha":"c1acaa58bbcbc3eafe538cb8274ba387047b69f8","message":"Ignore DS files\n\nSigned-off-by: Dmitriy Zaporozhets <dmitriy.zaporozhets@gmail.com>\n","authored_date":"2014-02-27T09:48:32.000+01:00","committed_date":"2014-02-27T09:48:32.000+01:00","commit_author":{"name":"Dmitriy Zaporozhets","email":"dmitriy.zaporozhets@gmail.com"},"committer":{"name":"Dmitriy Zaporozhets","email":"dmitriy.zaporozhets@gmail.com"}}],"merge_request_diff_files":[{"merge_request_diff_id":14,"relative_order":0,"utf8_diff":"Binary files a/.DS_Store and /dev/null differ\n","new_path":".DS_Store","old_path":".DS_Store","a_mode":"100644","b_mode":"0","new_file":false,"renamed_file":false,"deleted_file":true,"too_large":false},{"merge_request_diff_id":14,"relative_order":1,"utf8_diff":"--- a/.gitignore\n+++ b/.gitignore\n@@ -17,3 +17,4 @@ rerun.txt\n pickle-email-*.html\n .project\n config/initializers/secret_token.rb\n+.DS_Store\n","new_path":".gitignore","old_path":".gitignore","a_mode":"100644","b_mode":"100644","new_file":false,"renamed_file":false,"deleted_file":false,"too_large":false},{"merge_request_diff_id":14,"relative_order":2,"utf8_diff":"--- a/.gitmodules\n+++ b/.gitmodules\n@@ -1,3 +1,9 @@\n [submodule \"six\"]\n \tpath = six\n \turl = git://github.com/randx/six.git\n+[submodule \"gitlab-shell\"]\n+\tpath = gitlab-shell\n+\turl = https://github.com/gitlabhq/gitlab-shell.git\n+[submodule \"gitlab-grack\"]\n+\tpath = gitlab-grack\n+\turl = https://gitlab.com/gitlab-org/gitlab-grack.git\n","new_path":".gitmodules","old_path":".gitmodules","a_mode":"100644","b_mode":"100644","new_file":false,"renamed_file":false,"deleted_file":false,"too_large":false},{"merge_request_diff_id":14,"relative_order":3,"utf8_diff":"--- a/CHANGELOG\n+++ b/CHANGELOG\n@@ -1,4 +1,6 @@\n-v 
6.7.0\n+v6.8.0\n+\n+v6.7.0\n - Add support for Gemnasium as a Project Service (Olivier Gonzalez)\n - Add edit file button to MergeRequest diff\n - Public groups (Jason Hollingsworth)\n","new_path":"CHANGELOG","old_path":"CHANGELOG","a_mode":"100644","b_mode":"100644","new_file":false,"renamed_file":false,"deleted_file":false,"too_large":false},{"merge_request_diff_id":14,"relative_order":4,"utf8_diff":"--- /dev/null\n+++ b/encoding/iso8859.txt\n@@ -0,0 +1 @@\n+Äü\n","new_path":"encoding/iso8859.txt","old_path":"encoding/iso8859.txt","a_mode":"0","b_mode":"100644","new_file":true,"renamed_file":false,"deleted_file":false,"too_large":false},{"merge_request_diff_id":14,"relative_order":5,"utf8_diff":"Binary files a/files/.DS_Store and /dev/null differ\n","new_path":"files/.DS_Store","old_path":"files/.DS_Store","a_mode":"100644","b_mode":"0","new_file":false,"renamed_file":false,"deleted_file":true,"too_large":false},{"merge_request_diff_id":14,"relative_order":6,"utf8_diff":"--- /dev/null\n+++ b/files/images/wm.svg\n@@ -0,0 +1,78 @@\n+<?xml version=\"1.0\" encoding=\"UTF-8\" standalone=\"no\"?>\n+<svg width=\"1300px\" height=\"680px\" viewBox=\"0 0 1300 680\" version=\"1.1\" xmlns=\"http://www.w3.org/2000/svg\" xmlns:xlink=\"http://www.w3.org/1999/xlink\" xmlns:sketch=\"http://www.bohemiancoding.com/sketch/ns\">\n+ <!-- Generator: Sketch 3.2.2 (9983) - http://www.bohemiancoding.com/sketch -->\n+ <title>wm</title>\n+ <desc>Created with Sketch.</desc>\n+ <defs>\n+ <path id=\"path-1\" d=\"M-69.8,1023.54607 L1675.19996,1023.54607 L1675.19996,0 L-69.8,0 L-69.8,1023.54607 L-69.8,1023.54607 Z\"></path>\n+ </defs>\n+ <g id=\"Page-1\" stroke=\"none\" stroke-width=\"1\" fill=\"none\" fill-rule=\"evenodd\" sketch:type=\"MSPage\">\n+ <path d=\"M1300,680 L0,680 L0,0 L1300,0 L1300,680 L1300,680 Z\" id=\"bg\" fill=\"#30353E\" sketch:type=\"MSShapeGroup\"></path>\n+ <g id=\"gitlab_logo\" sketch:type=\"MSLayerGroup\" transform=\"translate(-262.000000, -172.000000)\">\n+ <g id=\"g10\" transform=\"translate(872.500000, 512.354581) scale(1, -1) translate(-872.500000, -512.354581) translate(0.000000, 0.290751)\">\n+ <g id=\"g12\" transform=\"translate(1218.022652, 440.744871)\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\">\n+ <path d=\"M-50.0233338,141.900706 L-69.07059,141.900706 L-69.0100967,0.155858152 L8.04444805,0.155858152 L8.04444805,17.6840847 L-49.9628405,17.6840847 L-50.0233338,141.900706 L-50.0233338,141.900706 Z\" id=\"path14\"></path>\n+ </g>\n+ <g id=\"g16\">\n+ <g id=\"g18-Clipped\">\n+ <mask id=\"mask-2\" sketch:name=\"path22\" fill=\"white\">\n+ <use xlink:href=\"#path-1\"></use>\n+ </mask>\n+ <g id=\"path22\"></g>\n+ <g id=\"g18\" mask=\"url(#mask-2)\">\n+ <g transform=\"translate(382.736659, 312.879425)\">\n+ <g id=\"g24\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(852.718192, 124.992771)\">\n+ <path d=\"M63.9833317,27.9148929 C59.2218085,22.9379001 51.2134221,17.9597442 40.3909323,17.9597442 C25.8888194,17.9597442 20.0453962,25.1013043 20.0453962,34.4074318 C20.0453962,48.4730484 29.7848226,55.1819277 50.5642821,55.1819277 C54.4602853,55.1819277 60.7364685,54.7492469 63.9833317,54.1002256 L63.9833317,27.9148929 L63.9833317,27.9148929 Z M44.2869356,113.827628 C28.9053426,113.827628 14.7975996,108.376082 3.78897657,99.301416 L10.5211864,87.6422957 C18.3131929,92.1866076 27.8374026,96.7320827 41.4728323,96.7320827 C57.0568452,96.7320827 63.9833317,88.7239978 63.9833317,75.3074024 L63.9833317,68.3821827 C60.9528485,69.0312039 54.6766653,69.4650479 
50.7806621,69.4650479 C17.4476729,69.4650479 0.565379986,57.7791759 0.565379986,33.3245665 C0.565379986,11.4683685 13.9844297,0.43151772 34.3299658,0.43151772 C48.0351955,0.43151772 61.1692285,6.70771614 65.7143717,16.8780421 L69.1776149,3.02876588 L82.5978279,3.02876588 L82.5978279,75.5237428 C82.5978279,98.462806 72.6408582,113.827628 44.2869356,113.827628 L44.2869356,113.827628 Z\" id=\"path26\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <g id=\"g28\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(959.546624, 124.857151)\">\n+ <path d=\"M37.2266657,17.4468081 C30.0837992,17.4468081 23.8064527,18.3121698 19.0449295,20.4767371 L19.0449295,79.2306079 L19.0449295,86.0464943 C25.538656,91.457331 33.5470425,95.3526217 43.7203922,95.3526217 C62.1173451,95.3526217 69.2602116,82.3687072 69.2602116,61.3767077 C69.2602116,31.5135879 57.7885819,17.4468081 37.2266657,17.4468081 M45.2315622,113.963713 C28.208506,113.963713 19.0449295,102.384849 19.0449295,102.384849 L19.0449295,120.67143 L18.9844362,144.908535 L10.3967097,144.908535 L0.371103324,144.908535 L0.431596656,6.62629771 C9.73826309,2.73100702 22.5081728,0.567602823 36.3611458,0.567602823 C71.8579349,0.567602823 88.9566078,23.2891625 88.9566078,62.4584098 C88.9566078,93.4043948 73.1527248,113.963713 45.2315622,113.963713\" id=\"path30\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <g id=\"g32\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(509.576747, 125.294950)\">\n+ <path d=\"M68.636665,129.10638 C85.5189579,129.10638 96.3414476,123.480366 103.484314,117.853189 L111.669527,132.029302 C100.513161,141.811145 85.5073245,147.06845 69.5021849,147.06845 C29.0274926,147.06845 0.673569983,122.3975 0.673569983,72.6252464 C0.673569983,20.4709215 31.2622559,0.12910638 66.2553217,0.12910638 C83.7879179,0.12910638 98.7227909,4.24073748 108.462217,8.35236859 L108.063194,64.0763105 L108.063194,70.6502677 L108.063194,81.6057001 L56.1168719,81.6057001 L56.1168719,64.0763105 L89.2323178,64.0763105 L89.6313411,21.7701271 C85.3025779,19.6055598 77.7269514,17.8748364 67.554765,17.8748364 C39.4172223,17.8748364 20.5863462,35.5717154 20.5863462,72.8415868 C20.5863462,110.711628 40.0663623,129.10638 68.636665,129.10638\" id=\"path34\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <g id=\"g36\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(692.388992, 124.376085)\">\n+ <path d=\"M19.7766662,145.390067 L1.16216997,145.390067 L1.2226633,121.585642 L1.2226633,111.846834 L1.2226633,106.170806 L1.2226633,96.2656714 L1.2226633,39.5681976 L1.2226633,39.3518572 C1.2226633,16.4127939 11.1796331,1.04797161 39.5335557,1.04797161 C43.4504989,1.04797161 47.2836822,1.40388649 51.0051854,2.07965952 L51.0051854,18.7925385 C48.3109055,18.3796307 45.4351455,18.1446804 42.3476589,18.1446804 C26.763646,18.1446804 19.8371595,26.1516022 19.8371595,39.5681976 L19.8371595,96.2656714 L51.0051854,96.2656714 L51.0051854,111.846834 L19.8371595,111.846834 L19.7766662,145.390067 L19.7766662,145.390067 Z\" id=\"path38\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <path d=\"M646.318899,128.021188 L664.933395,128.021188 L664.933395,236.223966 L646.318899,236.223966 L646.318899,128.021188 L646.318899,128.021188 Z\" id=\"path40\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"></path>\n+ <path d=\"M646.318899,251.154944 L664.933395,251.154944 L664.933395,269.766036 L646.318899,269.766036 
L646.318899,251.154944 L646.318899,251.154944 Z\" id=\"path42\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"></path>\n+ <g id=\"g44\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(0.464170, 0.676006)\">\n+ <path d=\"M429.269989,169.815599 L405.225053,243.802859 L357.571431,390.440955 C355.120288,397.984955 344.444378,397.984955 341.992071,390.440955 L294.337286,243.802859 L136.094873,243.802859 L88.4389245,390.440955 C85.9877812,397.984955 75.3118715,397.984955 72.8595648,390.440955 L25.2059427,243.802859 L1.16216997,169.815599 C-1.03187664,163.067173 1.37156997,155.674379 7.11261982,151.503429 L215.215498,0.336141836 L423.319539,151.503429 C429.060589,155.674379 431.462873,163.067173 429.269989,169.815599\" id=\"path46\" fill=\"#FC6D26\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <g id=\"g48\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(135.410135, 1.012147)\">\n+ <path d=\"M80.269998,0 L80.269998,0 L159.391786,243.466717 L1.14820997,243.466717 L80.269998,0 L80.269998,0 Z\" id=\"path50\" fill=\"#E24329\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <g id=\"g52\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(215.680133, 1.012147)\">\n+ <g id=\"path54\"></g>\n+ </g>\n+ <g id=\"g56\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(24.893471, 1.012613)\">\n+ <path d=\"M190.786662,0 L111.664874,243.465554 L0.777106647,243.465554 L190.786662,0 L190.786662,0 Z\" id=\"path58\" fill=\"#FC6D26\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <g id=\"g60\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(215.680133, 1.012613)\">\n+ <g id=\"path62\"></g>\n+ </g>\n+ <g id=\"g64\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(0.077245, 0.223203)\">\n+ <path d=\"M25.5933327,244.255313 L25.5933327,244.255313 L1.54839663,170.268052 C-0.644486651,163.519627 1.75779662,156.126833 7.50000981,151.957046 L215.602888,0.789758846 L25.5933327,244.255313 L25.5933327,244.255313 Z\" id=\"path66\" fill=\"#FCA326\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <g id=\"g68\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(215.680133, 1.012147)\">\n+ <g id=\"path70\"></g>\n+ </g>\n+ <g id=\"g72\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(25.670578, 244.478283)\">\n+ <path d=\"M0,0 L110.887767,0 L63.2329818,146.638096 C60.7806751,154.183259 50.1047654,154.183259 47.6536221,146.638096 L0,0 L0,0 Z\" id=\"path74\" fill=\"#E24329\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <g id=\"g76\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(215.680133, 1.012613)\">\n+ <path d=\"M0,0 L79.121788,243.465554 L190.009555,243.465554 L0,0 L0,0 Z\" id=\"path78\" fill=\"#FC6D26\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <g id=\"g80\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(214.902910, 0.223203)\">\n+ <path d=\"M190.786662,244.255313 L190.786662,244.255313 L214.831598,170.268052 C217.024481,163.519627 214.622198,156.126833 208.879985,151.957046 L0.777106647,0.789758846 L190.786662,244.255313 L190.786662,244.255313 Z\" id=\"path82\" fill=\"#FCA326\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <g id=\"g84\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(294.009575, 244.478283)\">\n+ <path 
d=\"M111.679997,0 L0.79222998,0 L48.4470155,146.638096 C50.8993221,154.183259 61.5752318,154.183259 64.0263751,146.638096 L111.679997,0 L111.679997,0 Z\" id=\"path86\" fill=\"#E24329\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ </g>\n+ </g>\n+ </g>\n+ </g>\n+ </g>\n+ </g>\n+ </g>\n+</svg>\n\\ No newline at end of file\n","new_path":"files/images/wm.svg","old_path":"files/images/wm.svg","a_mode":"0","b_mode":"100644","new_file":true,"renamed_file":false,"deleted_file":false,"too_large":false},{"merge_request_diff_id":14,"relative_order":7,"utf8_diff":"--- /dev/null\n+++ b/files/lfs/lfs_object.iso\n@@ -0,0 +1,4 @@\n+version https://git-lfs.github.com/spec/v1\n+oid sha256:91eff75a492a3ed0dfcb544d7f31326bc4014c8551849c192fd1e48d4dd2c897\n+size 1575078\n+\n","new_path":"files/lfs/lfs_object.iso","old_path":"files/lfs/lfs_object.iso","a_mode":"0","b_mode":"100644","new_file":true,"renamed_file":false,"deleted_file":false,"too_large":false},{"merge_request_diff_id":14,"relative_order":8,"utf8_diff":"--- a/files/ruby/popen.rb\n+++ b/files/ruby/popen.rb\n@@ -6,12 +6,18 @@ module Popen\n \n def popen(cmd, path=nil)\n unless cmd.is_a?(Array)\n- raise \"System commands must be given as an array of strings\"\n+ raise RuntimeError, \"System commands must be given as an array of strings\"\n end\n \n path ||= Dir.pwd\n- vars = { \"PWD\" => path }\n- options = { chdir: path }\n+\n+ vars = {\n+ \"PWD\" => path\n+ }\n+\n+ options = {\n+ chdir: path\n+ }\n \n unless File.directory?(path)\n FileUtils.mkdir_p(path)\n@@ -19,6 +25,7 @@ module Popen\n \n @cmd_output = \"\"\n @cmd_status = 0\n+\n Open3.popen3(vars, *cmd, options) do |stdin, stdout, stderr, wait_thr|\n @cmd_output << stdout.read\n @cmd_output << stderr.read\n","new_path":"files/ruby/popen.rb","old_path":"files/ruby/popen.rb","a_mode":"100644","b_mode":"100644","new_file":false,"renamed_file":false,"deleted_file":false,"too_large":false},{"merge_request_diff_id":14,"relative_order":9,"utf8_diff":"--- a/files/ruby/regex.rb\n+++ b/files/ruby/regex.rb\n@@ -19,14 +19,12 @@ module Gitlab\n end\n \n def archive_formats_regex\n- #|zip|tar| tar.gz | tar.bz2 |\n- /(zip|tar|tar\\.gz|tgz|gz|tar\\.bz2|tbz|tbz2|tb2|bz2)/\n+ /(zip|tar|7z|tar\\.gz|tgz|gz|tar\\.bz2|tbz|tbz2|tb2|bz2)/\n end\n \n def git_reference_regex\n # Valid git ref regex, see:\n # https://www.kernel.org/pub/software/scm/git/docs/git-check-ref-format.html\n-\n %r{\n (?!\n (?# doesn't begins with)\n","new_path":"files/ruby/regex.rb","old_path":"files/ruby/regex.rb","a_mode":"100644","b_mode":"100644","new_file":false,"renamed_file":false,"deleted_file":false,"too_large":false},{"merge_request_diff_id":14,"relative_order":10,"utf8_diff":"--- /dev/null\n+++ b/files/whitespace\n@@ -0,0 +1 @@\n+test \n","new_path":"files/whitespace","old_path":"files/whitespace","a_mode":"0","b_mode":"100644","new_file":true,"renamed_file":false,"deleted_file":false,"too_large":false},{"merge_request_diff_id":14,"relative_order":11,"utf8_diff":"--- /dev/null\n+++ b/foo/bar/.gitkeep\n","new_path":"foo/bar/.gitkeep","old_path":"foo/bar/.gitkeep","a_mode":"0","b_mode":"100644","new_file":true,"renamed_file":false,"deleted_file":false,"too_large":false},{"merge_request_diff_id":14,"relative_order":12,"utf8_diff":"--- /dev/null\n+++ b/gitlab-grack\n@@ -0,0 +1 @@\n+Subproject commit 
645f6c4c82fd3f5e06f67134450a570b795e55a6\n","new_path":"gitlab-grack","old_path":"gitlab-grack","a_mode":"0","b_mode":"160000","new_file":true,"renamed_file":false,"deleted_file":false,"too_large":false},{"merge_request_diff_id":14,"relative_order":13,"utf8_diff":"--- /dev/null\n+++ b/gitlab-shell\n@@ -0,0 +1 @@\n+Subproject commit 79bceae69cb5750d6567b223597999bfa91cb3b9\n","new_path":"gitlab-shell","old_path":"gitlab-shell","a_mode":"0","b_mode":"160000","new_file":true,"renamed_file":false,"deleted_file":false,"too_large":false},{"merge_request_diff_id":14,"relative_order":14,"utf8_diff":"--- /dev/null\n+++ b/test\n","new_path":"test","old_path":"test","a_mode":"0","b_mode":"100644","new_file":true,"renamed_file":false,"deleted_file":false,"too_large":false}],"merge_request_id":14,"created_at":"2016-06-14T15:02:24.770Z","updated_at":"2016-06-14T15:02:25.007Z","base_commit_sha":"ae73cb07c9eeaf35924a10f713b364d32b2dd34f","real_size":"15"},"events":[{"id":224,"target_type":"MergeRequest","target_id":14,"project_id":36,"created_at":"2016-06-14T15:02:25.113Z","updated_at":"2016-06-14T15:02:25.113Z","action":1,"author_id":1},{"id":174,"target_type":"MergeRequest","target_id":14,"project_id":5,"created_at":"2016-06-14T15:02:25.113Z","updated_at":"2016-06-14T15:02:25.113Z","action":1,"author_id":20}]}
{"id":13,"target_branch":"improve/awesome","source_branch":"test-8","source_project_id":5,"author_id":16,"assignee_id":25,"title":"Voluptates consequatur eius nemo amet libero animi illum delectus tempore.","created_at":"2016-06-14T15:02:24.415Z","updated_at":"2016-06-14T15:02:59.958Z","state":"opened","merge_status":"unchecked","target_project_id":5,"iid":5,"description":"Est eaque quasi qui qui. Similique voluptatem impedit iusto ratione reprehenderit. Itaque est illum ut nulla aut.","position":0,"updated_by_id":null,"merge_error":null,"merge_params":{"force_remove_source_branch":null},"merge_when_pipeline_succeeds":false,"merge_user_id":null,"merge_commit_sha":null,"notes":[{"id":793,"note":"In illum maxime aperiam nulla est aspernatur.","noteable_type":"MergeRequest","author_id":26,"created_at":"2016-06-14T15:02:59.782Z","updated_at":"2016-06-14T15:02:59.782Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":13,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"User 4"},"events":[{"merge_request_diff_id":14,"id":529,"target_type":"Note","target_id":793,"project_id":4,"created_at":"2016-07-07T14:35:12.128Z","updated_at":"2016-07-07T14:35:12.128Z","action":6,"author_id":1}]},{"id":794,"note":"Enim quia perferendis cum distinctio tenetur optio voluptas veniam.","noteable_type":"MergeRequest","author_id":25,"created_at":"2016-06-14T15:02:59.807Z","updated_at":"2016-06-14T15:02:59.807Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":13,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"User 3"},"events":[]},{"id":795,"note":"Dolor ad quia quis pariatur ducimus.","noteable_type":"MergeRequest","author_id":22,"created_at":"2016-06-14T15:02:59.831Z","updated_at":"2016-06-14T15:02:59.831Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":13,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"User 0"},"events":[]},{"id":796,"note":"Et a odio voluptate aut.","noteable_type":"MergeRequest","author_id":20,"created_at":"2016-06-14T15:02:59.854Z","updated_at":"2016-06-14T15:02:59.854Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":13,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Ottis Schuster II"},"events":[]},{"id":797,"note":"Quis nihil temporibus voluptatum modi minima a ut.","noteable_type":"MergeRequest","author_id":16,"created_at":"2016-06-14T15:02:59.879Z","updated_at":"2016-06-14T15:02:59.879Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":13,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Rhett Emmerich IV"},"events":[]},{"id":798,"note":"Ut alias consequatur in nostrum.","noteable_type":"MergeRequest","author_id":15,"created_at":"2016-06-14T15:02:59.904Z","updated_at":"2016-06-14T15:02:59.904Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":13,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Burdette Bernier"},"events":[]},{"id":799,"note":"Voluptatibus aperiam assumenda et neque sint libero.","noteable_type":"MergeRequest","author_id":6,"created_at":"2016-06-14T15:02:59.926Z","updated_at":"2016-06-14T15:02:59.926Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":13,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Ari 
Wintheiser"},"events":[]},{"id":800,"note":"Veritatis voluptatem dolor dolores magni quo ut ipsa fuga.","noteable_type":"MergeRequest","author_id":1,"created_at":"2016-06-14T15:02:59.956Z","updated_at":"2016-06-14T15:02:59.956Z","project_id":5,"attachment":{"url":null},"line_code":null,"commit_id":null,"noteable_id":13,"system":false,"st_diff":null,"updated_by_id":null,"author":{"name":"Administrator"},"events":[]}],"merge_request_diff":{"id":13,"state":"collected","merge_request_diff_commits":[{"merge_request_diff_id":13,"relative_order":0,"sha":"0bfedc29d30280c7e8564e19f654584b459e5868","message":"fixes #10\n","authored_date":"2016-01-19T15:25:23.000+01:00","committed_date":"2016-01-19T15:25:23.000+01:00","commit_author":{"name":"James Lopez","email":"james@jameslopez.es"},"committer":{"name":"James Lopez","email":"james@jameslopez.es"}},{"merge_request_diff_id":13,"relative_order":1,"sha":"be93687618e4b132087f430a4d8fc3a609c9b77c","message":"Merge branch 'master' into 'master'\r\n\r\nLFS object pointer.\r\n\r\n\r\n\r\nSee merge request !6","authored_date":"2015-12-07T12:52:12.000+01:00","committed_date":"2015-12-07T12:52:12.000+01:00","commit_author":{"name":"Marin Jankovski","email":"marin@gitlab.com"},"committer":{"name":"Marin Jankovski","email":"marin@gitlab.com"}},{"merge_request_diff_id":13,"relative_order":2,"sha":"048721d90c449b244b7b4c53a9186b04330174ec","message":"LFS object pointer.\n","authored_date":"2015-12-07T11:54:28.000+01:00","committed_date":"2015-12-07T11:54:28.000+01:00","commit_author":{"name":"Marin Jankovski","email":"maxlazio@gmail.com"},"committer":{"name":"Marin Jankovski","email":"maxlazio@gmail.com"}},{"merge_request_diff_id":13,"relative_order":3,"sha":"5f923865dde3436854e9ceb9cdb7815618d4e849","message":"GitLab currently doesn't support patches that involve a merge commit: add a commit here\n","authored_date":"2015-11-13T16:27:12.000+01:00","committed_date":"2015-11-13T16:27:12.000+01:00","commit_author":{"name":"Stan Hu","email":"stanhu@gmail.com"},"committer":{"name":"Stan Hu","email":"stanhu@gmail.com"}},{"merge_request_diff_id":13,"relative_order":4,"sha":"d2d430676773caa88cdaf7c55944073b2fd5561a","message":"Merge branch 'add-svg' into 'master'\r\n\r\nAdd GitLab SVG\r\n\r\nAdded to test preview of sanitized SVG images\r\n\r\nSee merge request !5","authored_date":"2015-11-13T08:50:17.000+01:00","committed_date":"2015-11-13T08:50:17.000+01:00","commit_author":{"name":"Stan Hu","email":"stanhu@gmail.com"},"committer":{"name":"Stan Hu","email":"stanhu@gmail.com"}},{"merge_request_diff_id":13,"relative_order":5,"sha":"2ea1f3dec713d940208fb5ce4a38765ecb5d3f73","message":"Add GitLab SVG\n","authored_date":"2015-11-13T08:39:43.000+01:00","committed_date":"2015-11-13T08:39:43.000+01:00","commit_author":{"name":"Stan Hu","email":"stanhu@gmail.com"},"committer":{"name":"Stan Hu","email":"stanhu@gmail.com"}},{"merge_request_diff_id":13,"relative_order":6,"sha":"59e29889be61e6e0e5e223bfa9ac2721d31605b8","message":"Merge branch 'whitespace' into 'master'\r\n\r\nadd whitespace test file\r\n\r\nSorry, I did a mistake.\r\nGit ignore empty files.\r\nSo I add a new whitespace test file.\r\n\r\nSee merge request !4","authored_date":"2015-11-13T07:21:40.000+01:00","committed_date":"2015-11-13T07:21:40.000+01:00","commit_author":{"name":"Stan Hu","email":"stanhu@gmail.com"},"committer":{"name":"Stan Hu","email":"stanhu@gmail.com"}},{"merge_request_diff_id":13,"relative_order":7,"sha":"66eceea0db202bb39c4e445e8ca28689645366c5","message":"add spaces in whitespace 
file\n","authored_date":"2015-11-13T06:01:27.000+01:00","committed_date":"2015-11-13T06:01:27.000+01:00","commit_author":{"name":"윤민식","email":"minsik.yoon@samsung.com"},"committer":{"name":"윤민식","email":"minsik.yoon@samsung.com"}},{"merge_request_diff_id":13,"relative_order":8,"sha":"08f22f255f082689c0d7d39d19205085311542bc","message":"remove empty file.(beacase git ignore empty file)\nadd whitespace test file.\n","authored_date":"2015-11-13T06:00:16.000+01:00","committed_date":"2015-11-13T06:00:16.000+01:00","commit_author":{"name":"윤민식","email":"minsik.yoon@samsung.com"},"committer":{"name":"윤민식","email":"minsik.yoon@samsung.com"}},{"merge_request_diff_id":13,"relative_order":9,"sha":"19e2e9b4ef76b422ce1154af39a91323ccc57434","message":"Merge branch 'whitespace' into 'master'\r\n\r\nadd spaces\r\n\r\nTo test this pull request.(https://github.com/gitlabhq/gitlabhq/pull/9757)\r\nJust add whitespaces.\r\n\r\nSee merge request !3","authored_date":"2015-11-13T05:23:14.000+01:00","committed_date":"2015-11-13T05:23:14.000+01:00","commit_author":{"name":"Stan Hu","email":"stanhu@gmail.com"},"committer":{"name":"Stan Hu","email":"stanhu@gmail.com"}},{"merge_request_diff_id":13,"relative_order":10,"sha":"c642fe9b8b9f28f9225d7ea953fe14e74748d53b","message":"add whitespace in empty\n","authored_date":"2015-11-13T05:08:45.000+01:00","committed_date":"2015-11-13T05:08:45.000+01:00","commit_author":{"name":"윤민식","email":"minsik.yoon@samsung.com"},"committer":{"name":"윤민식","email":"minsik.yoon@samsung.com"}},{"merge_request_diff_id":13,"relative_order":11,"sha":"9a944d90955aaf45f6d0c88f30e27f8d2c41cec0","message":"add empty file\n","authored_date":"2015-11-13T05:08:04.000+01:00","committed_date":"2015-11-13T05:08:04.000+01:00","commit_author":{"name":"윤민식","email":"minsik.yoon@samsung.com"},"committer":{"name":"윤민식","email":"minsik.yoon@samsung.com"}},{"merge_request_diff_id":13,"relative_order":12,"sha":"c7fbe50c7c7419d9701eebe64b1fdacc3df5b9dd","message":"Add ISO-8859 test file\n","authored_date":"2015-08-25T17:53:12.000+02:00","committed_date":"2015-08-25T17:53:12.000+02:00","commit_author":{"name":"Stan Hu","email":"stanhu@packetzoom.com"},"committer":{"name":"Stan Hu","email":"stanhu@packetzoom.com"}},{"merge_request_diff_id":13,"relative_order":13,"sha":"e56497bb5f03a90a51293fc6d516788730953899","message":"Merge branch 'tree_helper_spec' into 'master'\n\nAdd directory structure for tree_helper spec\n\nThis directory structure is needed for a testing the method flatten_tree(tree) in the TreeHelper module\n\nSee [merge request #275](https://gitlab.com/gitlab-org/gitlab-foss/merge_requests/275#note_732774)\n\nSee merge request !2\n","authored_date":"2015-01-10T22:23:29.000+01:00","committed_date":"2015-01-10T22:23:29.000+01:00","commit_author":{"name":"Sytse Sijbrandij","email":"sytse@gitlab.com"},"committer":{"name":"Sytse Sijbrandij","email":"sytse@gitlab.com"}},{"merge_request_diff_id":13,"relative_order":14,"sha":"4cd80ccab63c82b4bad16faa5193fbd2aa06df40","message":"add directory structure for tree_helper spec\n","authored_date":"2015-01-10T21:28:18.000+01:00","committed_date":"2015-01-10T21:28:18.000+01:00","commit_author":{"name":"marmis85","email":"marmis85@gmail.com"},"committer":{"name":"marmis85","email":"marmis85@gmail.com"}}],"merge_request_diff_files":[{"merge_request_diff_id":13,"relative_order":0,"utf8_diff":"--- a/CHANGELOG\n+++ b/CHANGELOG\n@@ -1,4 +1,6 @@\n-v 6.7.0\n+v6.8.0\n+\n+v6.7.0\n - Add support for Gemnasium as a Project Service (Olivier Gonzalez)\n - Add edit file button to 
MergeRequest diff\n - Public groups (Jason Hollingsworth)\n","new_path":"CHANGELOG","old_path":"CHANGELOG","a_mode":"100644","b_mode":"100644","new_file":false,"renamed_file":false,"deleted_file":false,"too_large":false},{"merge_request_diff_id":13,"relative_order":1,"utf8_diff":"--- /dev/null\n+++ b/encoding/iso8859.txt\n@@ -0,0 +1 @@\n+Äü\n","new_path":"encoding/iso8859.txt","old_path":"encoding/iso8859.txt","a_mode":"0","b_mode":"100644","new_file":true,"renamed_file":false,"deleted_file":false,"too_large":false},{"merge_request_diff_id":13,"relative_order":2,"utf8_diff":"--- /dev/null\n+++ b/files/images/wm.svg\n@@ -0,0 +1,78 @@\n+<?xml version=\"1.0\" encoding=\"UTF-8\" standalone=\"no\"?>\n+<svg width=\"1300px\" height=\"680px\" viewBox=\"0 0 1300 680\" version=\"1.1\" xmlns=\"http://www.w3.org/2000/svg\" xmlns:xlink=\"http://www.w3.org/1999/xlink\" xmlns:sketch=\"http://www.bohemiancoding.com/sketch/ns\">\n+ <!-- Generator: Sketch 3.2.2 (9983) - http://www.bohemiancoding.com/sketch -->\n+ <title>wm</title>\n+ <desc>Created with Sketch.</desc>\n+ <defs>\n+ <path id=\"path-1\" d=\"M-69.8,1023.54607 L1675.19996,1023.54607 L1675.19996,0 L-69.8,0 L-69.8,1023.54607 L-69.8,1023.54607 Z\"></path>\n+ </defs>\n+ <g id=\"Page-1\" stroke=\"none\" stroke-width=\"1\" fill=\"none\" fill-rule=\"evenodd\" sketch:type=\"MSPage\">\n+ <path d=\"M1300,680 L0,680 L0,0 L1300,0 L1300,680 L1300,680 Z\" id=\"bg\" fill=\"#30353E\" sketch:type=\"MSShapeGroup\"></path>\n+ <g id=\"gitlab_logo\" sketch:type=\"MSLayerGroup\" transform=\"translate(-262.000000, -172.000000)\">\n+ <g id=\"g10\" transform=\"translate(872.500000, 512.354581) scale(1, -1) translate(-872.500000, -512.354581) translate(0.000000, 0.290751)\">\n+ <g id=\"g12\" transform=\"translate(1218.022652, 440.744871)\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\">\n+ <path d=\"M-50.0233338,141.900706 L-69.07059,141.900706 L-69.0100967,0.155858152 L8.04444805,0.155858152 L8.04444805,17.6840847 L-49.9628405,17.6840847 L-50.0233338,141.900706 L-50.0233338,141.900706 Z\" id=\"path14\"></path>\n+ </g>\n+ <g id=\"g16\">\n+ <g id=\"g18-Clipped\">\n+ <mask id=\"mask-2\" sketch:name=\"path22\" fill=\"white\">\n+ <use xlink:href=\"#path-1\"></use>\n+ </mask>\n+ <g id=\"path22\"></g>\n+ <g id=\"g18\" mask=\"url(#mask-2)\">\n+ <g transform=\"translate(382.736659, 312.879425)\">\n+ <g id=\"g24\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(852.718192, 124.992771)\">\n+ <path d=\"M63.9833317,27.9148929 C59.2218085,22.9379001 51.2134221,17.9597442 40.3909323,17.9597442 C25.8888194,17.9597442 20.0453962,25.1013043 20.0453962,34.4074318 C20.0453962,48.4730484 29.7848226,55.1819277 50.5642821,55.1819277 C54.4602853,55.1819277 60.7364685,54.7492469 63.9833317,54.1002256 L63.9833317,27.9148929 L63.9833317,27.9148929 Z M44.2869356,113.827628 C28.9053426,113.827628 14.7975996,108.376082 3.78897657,99.301416 L10.5211864,87.6422957 C18.3131929,92.1866076 27.8374026,96.7320827 41.4728323,96.7320827 C57.0568452,96.7320827 63.9833317,88.7239978 63.9833317,75.3074024 L63.9833317,68.3821827 C60.9528485,69.0312039 54.6766653,69.4650479 50.7806621,69.4650479 C17.4476729,69.4650479 0.565379986,57.7791759 0.565379986,33.3245665 C0.565379986,11.4683685 13.9844297,0.43151772 34.3299658,0.43151772 C48.0351955,0.43151772 61.1692285,6.70771614 65.7143717,16.8780421 L69.1776149,3.02876588 L82.5978279,3.02876588 L82.5978279,75.5237428 C82.5978279,98.462806 72.6408582,113.827628 44.2869356,113.827628 L44.2869356,113.827628 Z\" id=\"path26\" 
fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <g id=\"g28\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(959.546624, 124.857151)\">\n+ <path d=\"M37.2266657,17.4468081 C30.0837992,17.4468081 23.8064527,18.3121698 19.0449295,20.4767371 L19.0449295,79.2306079 L19.0449295,86.0464943 C25.538656,91.457331 33.5470425,95.3526217 43.7203922,95.3526217 C62.1173451,95.3526217 69.2602116,82.3687072 69.2602116,61.3767077 C69.2602116,31.5135879 57.7885819,17.4468081 37.2266657,17.4468081 M45.2315622,113.963713 C28.208506,113.963713 19.0449295,102.384849 19.0449295,102.384849 L19.0449295,120.67143 L18.9844362,144.908535 L10.3967097,144.908535 L0.371103324,144.908535 L0.431596656,6.62629771 C9.73826309,2.73100702 22.5081728,0.567602823 36.3611458,0.567602823 C71.8579349,0.567602823 88.9566078,23.2891625 88.9566078,62.4584098 C88.9566078,93.4043948 73.1527248,113.963713 45.2315622,113.963713\" id=\"path30\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <g id=\"g32\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(509.576747, 125.294950)\">\n+ <path d=\"M68.636665,129.10638 C85.5189579,129.10638 96.3414476,123.480366 103.484314,117.853189 L111.669527,132.029302 C100.513161,141.811145 85.5073245,147.06845 69.5021849,147.06845 C29.0274926,147.06845 0.673569983,122.3975 0.673569983,72.6252464 C0.673569983,20.4709215 31.2622559,0.12910638 66.2553217,0.12910638 C83.7879179,0.12910638 98.7227909,4.24073748 108.462217,8.35236859 L108.063194,64.0763105 L108.063194,70.6502677 L108.063194,81.6057001 L56.1168719,81.6057001 L56.1168719,64.0763105 L89.2323178,64.0763105 L89.6313411,21.7701271 C85.3025779,19.6055598 77.7269514,17.8748364 67.554765,17.8748364 C39.4172223,17.8748364 20.5863462,35.5717154 20.5863462,72.8415868 C20.5863462,110.711628 40.0663623,129.10638 68.636665,129.10638\" id=\"path34\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <g id=\"g36\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(692.388992, 124.376085)\">\n+ <path d=\"M19.7766662,145.390067 L1.16216997,145.390067 L1.2226633,121.585642 L1.2226633,111.846834 L1.2226633,106.170806 L1.2226633,96.2656714 L1.2226633,39.5681976 L1.2226633,39.3518572 C1.2226633,16.4127939 11.1796331,1.04797161 39.5335557,1.04797161 C43.4504989,1.04797161 47.2836822,1.40388649 51.0051854,2.07965952 L51.0051854,18.7925385 C48.3109055,18.3796307 45.4351455,18.1446804 42.3476589,18.1446804 C26.763646,18.1446804 19.8371595,26.1516022 19.8371595,39.5681976 L19.8371595,96.2656714 L51.0051854,96.2656714 L51.0051854,111.846834 L19.8371595,111.846834 L19.7766662,145.390067 L19.7766662,145.390067 Z\" id=\"path38\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <path d=\"M646.318899,128.021188 L664.933395,128.021188 L664.933395,236.223966 L646.318899,236.223966 L646.318899,128.021188 L646.318899,128.021188 Z\" id=\"path40\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"></path>\n+ <path d=\"M646.318899,251.154944 L664.933395,251.154944 L664.933395,269.766036 L646.318899,269.766036 L646.318899,251.154944 L646.318899,251.154944 Z\" id=\"path42\" fill=\"#8C929D\" sketch:type=\"MSShapeGroup\"></path>\n+ <g id=\"g44\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(0.464170, 0.676006)\">\n+ <path d=\"M429.269989,169.815599 L405.225053,243.802859 L357.571431,390.440955 C355.120288,397.984955 344.444378,397.984955 341.992071,390.440955 L294.337286,243.802859 
L136.094873,243.802859 L88.4389245,390.440955 C85.9877812,397.984955 75.3118715,397.984955 72.8595648,390.440955 L25.2059427,243.802859 L1.16216997,169.815599 C-1.03187664,163.067173 1.37156997,155.674379 7.11261982,151.503429 L215.215498,0.336141836 L423.319539,151.503429 C429.060589,155.674379 431.462873,163.067173 429.269989,169.815599\" id=\"path46\" fill=\"#FC6D26\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <g id=\"g48\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(135.410135, 1.012147)\">\n+ <path d=\"M80.269998,0 L80.269998,0 L159.391786,243.466717 L1.14820997,243.466717 L80.269998,0 L80.269998,0 Z\" id=\"path50\" fill=\"#E24329\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <g id=\"g52\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(215.680133, 1.012147)\">\n+ <g id=\"path54\"></g>\n+ </g>\n+ <g id=\"g56\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(24.893471, 1.012613)\">\n+ <path d=\"M190.786662,0 L111.664874,243.465554 L0.777106647,243.465554 L190.786662,0 L190.786662,0 Z\" id=\"path58\" fill=\"#FC6D26\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <g id=\"g60\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(215.680133, 1.012613)\">\n+ <g id=\"path62\"></g>\n+ </g>\n+ <g id=\"g64\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(0.077245, 0.223203)\">\n+ <path d=\"M25.5933327,244.255313 L25.5933327,244.255313 L1.54839663,170.268052 C-0.644486651,163.519627 1.75779662,156.126833 7.50000981,151.957046 L215.602888,0.789758846 L25.5933327,244.255313 L25.5933327,244.255313 Z\" id=\"path66\" fill=\"#FCA326\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <g id=\"g68\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(215.680133, 1.012147)\">\n+ <g id=\"path70\"></g>\n+ </g>\n+ <g id=\"g72\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(25.670578, 244.478283)\">\n+ <path d=\"M0,0 L110.887767,0 L63.2329818,146.638096 C60.7806751,154.183259 50.1047654,154.183259 47.6536221,146.638096 L0,0 L0,0 Z\" id=\"path74\" fill=\"#E24329\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <g id=\"g76\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(215.680133, 1.012613)\">\n+ <path d=\"M0,0 L79.121788,243.465554 L190.009555,243.465554 L0,0 L0,0 Z\" id=\"path78\" fill=\"#FC6D26\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <g id=\"g80\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(214.902910, 0.223203)\">\n+ <path d=\"M190.786662,244.255313 L190.786662,244.255313 L214.831598,170.268052 C217.024481,163.519627 214.622198,156.126833 208.879985,151.957046 L0.777106647,0.789758846 L190.786662,244.255313 L190.786662,244.255313 Z\" id=\"path82\" fill=\"#FCA326\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ <g id=\"g84\" stroke-width=\"1\" fill=\"none\" sketch:type=\"MSLayerGroup\" transform=\"translate(294.009575, 244.478283)\">\n+ <path d=\"M111.679997,0 L0.79222998,0 L48.4470155,146.638096 C50.8993221,154.183259 61.5752318,154.183259 64.0263751,146.638096 L111.679997,0 L111.679997,0 Z\" id=\"path86\" fill=\"#E24329\" sketch:type=\"MSShapeGroup\"></path>\n+ </g>\n+ </g>\n+ </g>\n+ </g>\n+ </g>\n+ </g>\n+ </g>\n+ </g>\n+</svg>\n\\ No newline at end of 
file\n","new_path":"files/images/wm.svg","old_path":"files/images/wm.svg","a_mode":"0","b_mode":"100644","new_file":true,"renamed_file":false,"deleted_file":false,"too_large":false},{"merge_request_diff_id":13,"relative_order":3,"utf8_diff":"--- /dev/null\n+++ b/files/lfs/lfs_object.iso\n@@ -0,0 +1,4 @@\n+version https://git-lfs.github.com/spec/v1\n+oid sha256:91eff75a492a3ed0dfcb544d7f31326bc4014c8551849c192fd1e48d4dd2c897\n+size 1575078\n+\n","new_path":"files/lfs/lfs_object.iso","old_path":"files/lfs/lfs_object.iso","a_mode":"0","b_mode":"100644","new_file":true,"renamed_file":false,"deleted_file":false,"too_large":false},{"merge_request_diff_id":13,"relative_order":4,"utf8_diff":"--- /dev/null\n+++ b/files/whitespace\n@@ -0,0 +1 @@\n+test \n","new_path":"files/whitespace","old_path":"files/whitespace","a_mode":"0","b_mode":"100644","new_file":true,"renamed_file":false,"deleted_file":false,"too_large":false},{"merge_request_diff_id":13,"relative_order":5,"utf8_diff":"--- /dev/null\n+++ b/foo/bar/.gitkeep\n","new_path":"foo/bar/.gitkeep","old_path":"foo/bar/.gitkeep","a_mode":"0","b_mode":"100644","new_file":true,"renamed_file":false,"deleted_file":false,"too_large":false},{"merge_request_diff_id":13,"relative_order":6,"utf8_diff":"--- /dev/null\n+++ b/test\n","new_path":"test","old_path":"test","a_mode":"0","b_mode":"100644","new_file":true,"renamed_file":false,"deleted_file":false,"too_large":false}],"merge_request_id":13,"created_at":"2016-06-14T15:02:24.420Z","updated_at":"2016-06-14T15:02:24.561Z","base_commit_sha":"5937ac0a7beb003549fc5fd26fc247adbce4a52e","real_size":"7"},"events":[{"id":225,"target_type":"MergeRequest","target_id":13,"project_id":36,"created_at":"2016-06-14T15:02:24.636Z","updated_at":"2016-06-14T15:02:24.636Z","action":1,"author_id":16},{"id":173,"target_type":"MergeRequest","target_id":13,"project_id":5,"created_at":"2016-06-14T15:02:24.636Z","updated_at":"2016-06-14T15:02:24.636Z","action":1,"author_id":16}]}
diff --git a/spec/fixtures/markdown.md.erb b/spec/fixtures/markdown.md.erb
index 2da16408fbc..18cd63b7bcb 100644
--- a/spec/fixtures/markdown.md.erb
+++ b/spec/fixtures/markdown.md.erb
@@ -275,9 +275,11 @@ References should be parseable even inside _<%= merge_request.to_reference %>_ e
- [ ] Incomplete task 1
- [x] Complete task 1
+- [~] Inapplicable task 1
- [ ] Incomplete task 2
- [ ] Incomplete sub-task 1
- [ ] Incomplete sub-task 2
+ - [~] Inapplicable sub-task 1
- [x] Complete sub-task 1
- [X] Complete task 2
diff --git a/spec/fixtures/packages/maven/my-app-1.0-20180724.124855-1.pom.sha1 b/spec/fixtures/packages/maven/my-app-1.0-20180724.124855-1.pom.sha1
new file mode 100644
index 00000000000..33b75ee37eb
--- /dev/null
+++ b/spec/fixtures/packages/maven/my-app-1.0-20180724.124855-1.pom.sha1
@@ -0,0 +1 @@
+78f664f030d2a684f59081e88b9461257e859c14
diff --git a/spec/fixtures/whats_new/20201225_01_01.yml b/spec/fixtures/whats_new/20201225_01_01.yml
index 7bf58900cc7..45f989be01c 100644
--- a/spec/fixtures/whats_new/20201225_01_01.yml
+++ b/spec/fixtures/whats_new/20201225_01_01.yml
@@ -1,7 +1,7 @@
---
-- title: It's gonna be a bright
- body: |
+- name: It's gonna be a bright
+ description: |
## It's gonna be a bright
self-managed: true
gitlab-com: false
- packages: ["Premium", "Ultimate"]
+ available_in: ["Premium", "Ultimate"]
diff --git a/spec/fixtures/whats_new/20201225_01_02.yml b/spec/fixtures/whats_new/20201225_01_02.yml
index 90b5192897e..baea00f7b11 100644
--- a/spec/fixtures/whats_new/20201225_01_02.yml
+++ b/spec/fixtures/whats_new/20201225_01_02.yml
@@ -1,7 +1,7 @@
---
-- title: bright
- body: |
+- name: bright
+ description: |
## bright
self-managed: true
gitlab-com: false
- packages: ["Premium", "Ultimate"]
+ available_in: ["Premium", "Ultimate"]
diff --git a/spec/fixtures/whats_new/20201225_01_04.yml b/spec/fixtures/whats_new/20201225_01_04.yml
index 0dfd0d780c7..918592aecfe 100644
--- a/spec/fixtures/whats_new/20201225_01_04.yml
+++ b/spec/fixtures/whats_new/20201225_01_04.yml
@@ -1,19 +1,19 @@
---
-- title: View epics on a board
- body: |
+- name: View epics on a board
+ description: |
## View epics on a board
self-managed: true
gitlab-com: false
- packages: ["Free", "Premium", "Ultimate"]
-- title: View Jira issue details in GitLab
- body: |
+ available_in: ["Free", "Premium", "Ultimate"]
+- name: View Jira issue details in GitLab
+ description: |
## View Jira issue details in GitLab
self-managed: true
gitlab-com: false
- packages: ["Premium", "Ultimate"]
-- title: Integrate any IT alerting tool with GitLab
- body: |
+ available_in: ["Premium", "Ultimate"]
+- name: Integrate any IT alerting tool with GitLab
+ description: |
## Integrate any IT alerting tool with GitLab
self-managed: true
gitlab-com: false
- packages: ["Ultimate"] \ No newline at end of file
+ available_in: ["Ultimate"]
diff --git a/spec/fixtures/whats_new/20201225_01_05.yml b/spec/fixtures/whats_new/20201225_01_05.yml
index d707502af54..a14adfeec13 100644
--- a/spec/fixtures/whats_new/20201225_01_05.yml
+++ b/spec/fixtures/whats_new/20201225_01_05.yml
@@ -1,14 +1,14 @@
---
-- title: bright and sunshinin' day
- body: |
+- name: bright and sunshinin' day
+ description: |
bright and sunshinin' [day](https://en.wikipedia.org/wiki/Day)
self-managed: true
gitlab-com: false
- packages: ["Premium", "Ultimate"]
+ available_in: ["Premium", "Ultimate"]
release: '01.05'
-- title: I think I can make it now the pain is gone
- body: |
+- name: I think I can make it now the pain is gone
+ description: |
## I think I can make it now the pain is gone
self-managed: false
gitlab-com: true
- packages: ["Premium", "Ultimate"]
+ available_in: ["Premium", "Ultimate"]
diff --git a/spec/fixtures/whats_new/blank.yml b/spec/fixtures/whats_new/blank.yml
index 4628cae2ecc..1475db0ea82 100644
--- a/spec/fixtures/whats_new/blank.yml
+++ b/spec/fixtures/whats_new/blank.yml
@@ -1,5 +1,5 @@
-- title:
- body:
+- name:
+ description:
stage:
self-managed:
gitlab-com:
diff --git a/spec/fixtures/whats_new/invalid.yml b/spec/fixtures/whats_new/invalid.yml
index a3342be0f24..b888aba9a16 100644
--- a/spec/fixtures/whats_new/invalid.yml
+++ b/spec/fixtures/whats_new/invalid.yml
@@ -1,20 +1,20 @@
-- title: Create and view requirements in GitLab
- body: The first step towards managing requirements from within GitLab is here! This initial release allows users to create and view requirements at a project level. As Requirements Management evolves in GitLab, stay tuned for support for traceability between all artifacts, creating a seamless workflow to visually demonstrate completeness and compliance.
+- name: Create and view requirements in GitLab
+ description: The first step towards managing requirements from within GitLab is here! This initial release allows users to create and view requirements at a project level. As Requirements Management evolves in GitLab, stay tuned for support for traceability between all artifacts, creating a seamless workflow to visually demonstrate completeness and compliance.
stage: Plan
self-managed: true
gitlab-com: true
- packages: [ALL]
- url: https://docs.gitlab.com/ee/user/project/requirements/index.html
+ available_in: [ALL]
+ documentation_link: https://docs.gitlab.com/ee/user/project/requirements/index.html
image_url: https://about.gitlab.com/images/press/logo/png/gitlab-icon-rgb.png
published_at: 2020-04-22
release: 12.10
-- title: Retrieve CI/CD secrets from HashiCorp Vault
- body: In this release, GitLab adds support for lightweight JSON Web Token (JWT) authentication to integrate with your existing HashiCorp Vault. Now, you can seamlessly provide secrets to CI/CD jobs by taking advantage of HashiCorp's JWT authentication method rather than manually having to provide secrets as a variable in GitLab.
+- name: Retrieve CI/CD secrets from HashiCorp Vault
+ description: In this release, GitLab adds support for lightweight JSON Web Token (JWT) authentication to integrate with your existing HashiCorp Vault. Now, you can seamlessly provide secrets to CI/CD jobs by taking advantage of HashiCorp's JWT authentication method rather than manually having to provide secrets as a variable in GitLab.
stage: Release
self-managed: true
gitlab-com: true
- packages: [Free]
- url: https://docs.gitlab.com/ee/ci/examples/authenticating-with-hashicorp-vault/index.html
+ available_in: [Free]
+ documentation_link: https://docs.gitlab.com/ee/ci/examples/authenticating-with-hashicorp-vault/index.html
image_url: https://about.gitlab.com/images/12_10/jwt-vault-1.png
published_at: 2020-04-22
release: 12.10
diff --git a/spec/fixtures/whats_new/valid.yml b/spec/fixtures/whats_new/valid.yml
index ec465f47989..e46ff85cd9c 100644
--- a/spec/fixtures/whats_new/valid.yml
+++ b/spec/fixtures/whats_new/valid.yml
@@ -1,20 +1,20 @@
-- title: Create and view requirements in GitLab
- body: The first step towards managing requirements from within GitLab is here! This initial release allows users to create and view requirements at a project level. As Requirements Management evolves in GitLab, stay tuned for support for traceability between all artifacts, creating a seamless workflow to visually demonstrate completeness and compliance.
+- name: Create and view requirements in GitLab
+ description: The first step towards managing requirements from within GitLab is here! This initial release allows users to create and view requirements at a project level. As Requirements Management evolves in GitLab, stay tuned for support for traceability between all artifacts, creating a seamless workflow to visually demonstrate completeness and compliance.
stage: Plan
self-managed: true
gitlab-com: true
- packages: [Ultimate]
- url: https://docs.gitlab.com/ee/user/project/requirements/index.html
+ available_in: [Ultimate]
+ documentation_link: https://docs.gitlab.com/ee/user/project/requirements/index.html
image_url: https://about.gitlab.com/images/press/logo/png/gitlab-icon-rgb.png
published_at: 2020-04-22
release: 12.10
-- title: Retrieve CI/CD secrets from HashiCorp Vault
- body: In this release, GitLab adds support for lightweight JSON Web Token (JWT) authentication to integrate with your existing HashiCorp Vault. Now, you can seamlessly provide secrets to CI/CD jobs by taking advantage of HashiCorp's JWT authentication method rather than manually having to provide secrets as a variable in GitLab.
+- name: Retrieve CI/CD secrets from HashiCorp Vault
+ description: In this release, GitLab adds support for lightweight JSON Web Token (JWT) authentication to integrate with your existing HashiCorp Vault. Now, you can seamlessly provide secrets to CI/CD jobs by taking advantage of HashiCorp's JWT authentication method rather than manually having to provide secrets as a variable in GitLab.
stage: Release
self-managed: true
gitlab-com: true
- packages: [Free]
- url: https://docs.gitlab.com/ee/ci/examples/authenticating-with-hashicorp-vault/index.html
+ available_in: [Free]
+ documentation_link: https://docs.gitlab.com/ee/ci/examples/authenticating-with-hashicorp-vault/index.html
image_url: https://about.gitlab.com/images/12_10/jwt-vault-1.png
published_at: 2020-04-22
release: 12.10
diff --git a/spec/frontend/__helpers__/mock_apollo_helper.js b/spec/frontend/__helpers__/mock_apollo_helper.js
index bae9f33be87..e0739df7086 100644
--- a/spec/frontend/__helpers__/mock_apollo_helper.js
+++ b/spec/frontend/__helpers__/mock_apollo_helper.js
@@ -8,7 +8,6 @@ export function createMockClient(handlers = [], resolvers = {}, cacheOptions = {
const cache = new InMemoryCache({
possibleTypes,
typePolicies,
- addTypename: false,
...cacheOptions,
});
diff --git a/spec/frontend/__helpers__/mock_dom_observer.js b/spec/frontend/__helpers__/mock_dom_observer.js
index bc2646be4c2..8c9c435041e 100644
--- a/spec/frontend/__helpers__/mock_dom_observer.js
+++ b/spec/frontend/__helpers__/mock_dom_observer.js
@@ -22,14 +22,12 @@ class MockObserver {
takeRecords() {}
- // eslint-disable-next-line camelcase
$_triggerObserve(node, { entry = {}, options = {} } = {}) {
if (this.$_hasObserver(node, options)) {
this.$_cb([{ target: node, ...entry }]);
}
}
- // eslint-disable-next-line camelcase
$_hasObserver(node, options = {}) {
return this.$_observers.some(
([obvNode, obvOptions]) => node === obvNode && isMatch(options, obvOptions),
diff --git a/spec/frontend/__helpers__/mocks/axios_utils.js b/spec/frontend/__helpers__/mocks/axios_utils.js
index b1efd29dc8d..60644c84a57 100644
--- a/spec/frontend/__helpers__/mocks/axios_utils.js
+++ b/spec/frontend/__helpers__/mocks/axios_utils.js
@@ -1,4 +1,6 @@
import EventEmitter from 'events';
+// eslint-disable-next-line no-restricted-syntax
+import { setImmediate } from 'timers';
const axios = jest.requireActual('~/lib/utils/axios_utils').default;
diff --git a/spec/frontend/__helpers__/stub_component.js b/spec/frontend/__helpers__/stub_component.js
index 96fe3a8bc45..4f9d1ee6f5d 100644
--- a/spec/frontend/__helpers__/stub_component.js
+++ b/spec/frontend/__helpers__/stub_component.js
@@ -22,6 +22,14 @@ const createStubbedMethods = (methods = {}) => {
);
};
+export const RENDER_ALL_SLOTS_TEMPLATE = `<div>
+ <template v-for="(_, name) in $scopedSlots">
+ <div :data-testid="'slot-' + name">
+ <slot :name="name" />
+ </div>
+ </template>
+</div>`;
+
export function stubComponent(Component, options = {}) {
return {
props: Component.props,
diff --git a/spec/frontend/__helpers__/timeout.js b/spec/frontend/__helpers__/timeout.js
deleted file mode 100644
index 8688625a95e..00000000000
--- a/spec/frontend/__helpers__/timeout.js
+++ /dev/null
@@ -1,59 +0,0 @@
-const NS_PER_SEC = 1e9;
-const NS_PER_MS = 1e6;
-const IS_DEBUGGING = process.execArgv.join(' ').includes('--inspect-brk');
-
-let testTimeoutNS;
-
-export const setTestTimeout = (newTimeoutMS) => {
- const newTimeoutNS = newTimeoutMS * NS_PER_MS;
- // never accept a smaller timeout than the default
- if (newTimeoutNS < testTimeoutNS) {
- return;
- }
-
- testTimeoutNS = newTimeoutNS;
- jest.setTimeout(newTimeoutMS);
-};
-
-// Allows slow tests to set their own timeout.
-// Useful for tests with jQuery, which is very slow in big DOMs.
-let temporaryTimeoutNS = null;
-export const setTestTimeoutOnce = (newTimeoutMS) => {
- const newTimeoutNS = newTimeoutMS * NS_PER_MS;
- // never accept a smaller timeout than the default
- if (newTimeoutNS < testTimeoutNS) {
- return;
- }
-
- temporaryTimeoutNS = newTimeoutNS;
-};
-
-export const initializeTestTimeout = (defaultTimeoutMS) => {
- setTestTimeout(defaultTimeoutMS);
-
- let testStartTime;
-
- // https://github.com/facebook/jest/issues/6947
- beforeEach(() => {
- testStartTime = process.hrtime();
- });
-
- afterEach(() => {
- let timeoutNS = testTimeoutNS;
- if (Number.isFinite(temporaryTimeoutNS)) {
- timeoutNS = temporaryTimeoutNS;
- temporaryTimeoutNS = null;
- }
-
- const [seconds, remainingNs] = process.hrtime(testStartTime);
- const elapsedNS = seconds * NS_PER_SEC + remainingNs;
-
- // Disable the timeout error when debugging. It is meaningless because
- // debugging always takes longer than the test timeout.
- if (elapsedNS > timeoutNS && !IS_DEBUGGING) {
- throw new Error(
- `Test took too long (${elapsedNS / NS_PER_MS}ms > ${timeoutNS / NS_PER_MS}ms)!`,
- );
- }
- });
-};
diff --git a/spec/frontend/__helpers__/vue_mount_component_helper.js b/spec/frontend/__helpers__/vue_mount_component_helper.js
index 615ff69a01c..ed43355ea5b 100644
--- a/spec/frontend/__helpers__/vue_mount_component_helper.js
+++ b/spec/frontend/__helpers__/vue_mount_component_helper.js
@@ -1,5 +1,3 @@
-import Vue from 'vue';
-
/**
* Deprecated. Please do not use.
* Please see https://gitlab.com/groups/gitlab-org/-/epics/2445
@@ -33,31 +31,4 @@ export const mountComponentWithStore = (Component, { el, props, store }) =>
* Deprecated. Please do not use.
* Please see https://gitlab.com/groups/gitlab-org/-/epics/2445
*/
-export const mountComponentWithSlots = (Component, { props, slots }) => {
- const component = new Component({
- propsData: props || {},
- });
-
- component.$slots = slots;
-
- return component.$mount();
-};
-
-/**
- * Mount a component with the given render method.
- *
- * -----------------------------
- * Deprecated. Please do not use.
- * Please see https://gitlab.com/groups/gitlab-org/-/epics/2445
- * -----------------------------
- *
- * This helps with inserting slots that need to be compiled.
- */
-export const mountComponentWithRender = (render, el = null) =>
- mountComponent(Vue.extend({ render }), {}, el);
-
-/**
- * Deprecated. Please do not use.
- * Please see https://gitlab.com/groups/gitlab-org/-/epics/2445
- */
export default mountComponent;
diff --git a/spec/frontend/__helpers__/vue_test_utils_helper.js b/spec/frontend/__helpers__/vue_test_utils_helper.js
index 2aae91f8a39..75bd5df8cbf 100644
--- a/spec/frontend/__helpers__/vue_test_utils_helper.js
+++ b/spec/frontend/__helpers__/vue_test_utils_helper.js
@@ -7,6 +7,20 @@ const vNodeContainsText = (vnode, text) =>
(vnode.children && vnode.children.filter((child) => vNodeContainsText(child, text)).length);
/**
+ * Create a VTU wrapper from an element.
+ *
+ * If a Vue instance manages the element, the wrapper is created
+ * with that Vue instance.
+ *
+ * @param {HTMLElement} element
+ * @param {Object} options
+ * @returns VTU wrapper
+ */
+const createWrapperFromElement = (element, options) =>
+ // eslint-disable-next-line no-underscore-dangle
+ createWrapper(element.__vue__ || element, options || {});
+
+/**
* Determines whether a `shallowMount` Wrapper contains text
* within one of it's slots. This will also work on Wrappers
* acquired with `find()`, but only if it's parent Wrapper
@@ -85,8 +99,7 @@ export const extendedWrapper = (wrapper) => {
if (!elements.length) {
return new ErrorWrapper(query);
}
-
- return createWrapper(elements[0], this.options || {});
+ return createWrapperFromElement(elements[0], this.options);
},
},
};
@@ -104,7 +117,7 @@ export const extendedWrapper = (wrapper) => {
);
const wrappers = elements.map((element) => {
- const elementWrapper = createWrapper(element, this.options || {});
+ const elementWrapper = createWrapperFromElement(element, this.options);
elementWrapper.selector = text;
return elementWrapper;
diff --git a/spec/frontend/__helpers__/vue_test_utils_helper_spec.js b/spec/frontend/__helpers__/vue_test_utils_helper_spec.js
index 3bb228f94b8..ae180c3b49d 100644
--- a/spec/frontend/__helpers__/vue_test_utils_helper_spec.js
+++ b/spec/frontend/__helpers__/vue_test_utils_helper_spec.js
@@ -6,6 +6,7 @@ import {
WrapperArray as VTUWrapperArray,
ErrorWrapper as VTUErrorWrapper,
} from '@vue/test-utils';
+import Vue from 'vue';
import {
extendedWrapper,
shallowMountExtended,
@@ -139,9 +140,12 @@ describe('Vue test utils helpers', () => {
const text = 'foo bar';
const options = { selector: 'div' };
const mockDiv = document.createElement('div');
+ const mockVm = new Vue({ render: (h) => h('div') }).$mount();
let wrapper;
beforeEach(() => {
+ jest.spyOn(vtu, 'createWrapper');
+
wrapper = extendedWrapper(
shallowMount({
template: `<div>foo bar</div>`,
@@ -164,7 +168,6 @@ describe('Vue test utils helpers', () => {
describe('when element is found', () => {
beforeEach(() => {
jest.spyOn(testingLibrary, expectedQuery).mockImplementation(() => [mockDiv]);
- jest.spyOn(vtu, 'createWrapper');
});
it('returns a VTU wrapper', () => {
@@ -172,14 +175,27 @@ describe('Vue test utils helpers', () => {
expect(vtu.createWrapper).toHaveBeenCalledWith(mockDiv, wrapper.options);
expect(result).toBeInstanceOf(VTUWrapper);
+ expect(result.vm).toBeUndefined();
});
});
+ describe('when a Vue instance element is found', () => {
+ beforeEach(() => {
+ jest.spyOn(testingLibrary, expectedQuery).mockImplementation(() => [mockVm.$el]);
+ });
+
+ it('returns a VTU wrapper', () => {
+ const result = wrapper[findMethod](text, options);
+
+ expect(vtu.createWrapper).toHaveBeenCalledWith(mockVm, wrapper.options);
+ expect(result).toBeInstanceOf(VTUWrapper);
+ expect(result.vm).toBeInstanceOf(Vue);
+ });
+ });
describe('when multiple elements are found', () => {
beforeEach(() => {
const mockSpan = document.createElement('span');
jest.spyOn(testingLibrary, expectedQuery).mockImplementation(() => [mockDiv, mockSpan]);
- jest.spyOn(vtu, 'createWrapper');
});
it('returns the first element as a VTU wrapper', () => {
@@ -187,6 +203,24 @@ describe('Vue test utils helpers', () => {
expect(vtu.createWrapper).toHaveBeenCalledWith(mockDiv, wrapper.options);
expect(result).toBeInstanceOf(VTUWrapper);
+ expect(result.vm).toBeUndefined();
+ });
+ });
+
+ describe('when multiple Vue instances are found', () => {
+ beforeEach(() => {
+ const mockVm2 = new Vue({ render: (h) => h('span') }).$mount();
+ jest
+ .spyOn(testingLibrary, expectedQuery)
+ .mockImplementation(() => [mockVm.$el, mockVm2.$el]);
+ });
+
+ it('returns the first element as a VTU wrapper', () => {
+ const result = wrapper[findMethod](text, options);
+
+ expect(vtu.createWrapper).toHaveBeenCalledWith(mockVm, wrapper.options);
+ expect(result).toBeInstanceOf(VTUWrapper);
+ expect(result.vm).toBeInstanceOf(Vue);
});
});
@@ -211,12 +245,17 @@ describe('Vue test utils helpers', () => {
${'findAllByAltText'} | ${'queryAllByAltText'}
`('$findMethod', ({ findMethod, expectedQuery }) => {
const text = 'foo bar';
- const options = { selector: 'div' };
+ const options = { selector: 'li' };
const mockElements = [
document.createElement('li'),
document.createElement('li'),
document.createElement('li'),
];
+ const mockVms = [
+ new Vue({ render: (h) => h('li') }).$mount(),
+ new Vue({ render: (h) => h('li') }).$mount(),
+ new Vue({ render: (h) => h('li') }).$mount(),
+ ];
let wrapper;
beforeEach(() => {
@@ -245,9 +284,13 @@ describe('Vue test utils helpers', () => {
);
});
- describe('when elements are found', () => {
+ describe.each`
+ case | mockResult | isVueInstance
+ ${'HTMLElements'} | ${mockElements} | ${false}
+ ${'Vue instance elements'} | ${mockVms} | ${true}
+ `('when $case are found', ({ mockResult, isVueInstance }) => {
beforeEach(() => {
- jest.spyOn(testingLibrary, expectedQuery).mockImplementation(() => mockElements);
+ jest.spyOn(testingLibrary, expectedQuery).mockImplementation(() => mockResult);
});
it('returns a VTU wrapper array', () => {
@@ -257,7 +300,9 @@ describe('Vue test utils helpers', () => {
expect(
result.wrappers.every(
(resultWrapper) =>
- resultWrapper instanceof VTUWrapper && resultWrapper.options === wrapper.options,
+ resultWrapper instanceof VTUWrapper &&
+ resultWrapper.vm instanceof Vue === isVueInstance &&
+ resultWrapper.options === wrapper.options,
),
).toBe(true);
expect(result.length).toBe(3);
diff --git a/spec/frontend/__helpers__/vuex_action_helper.js b/spec/frontend/__helpers__/vuex_action_helper.js
index ab2637d6024..bdd5a0a9034 100644
--- a/spec/frontend/__helpers__/vuex_action_helper.js
+++ b/spec/frontend/__helpers__/vuex_action_helper.js
@@ -1,5 +1,7 @@
-/**
- * Helper for testing action with expected mutations inspired in
+// eslint-disable-next-line no-restricted-syntax
+import { setImmediate } from 'timers';
+
+/** Helper for testing action with expected mutations inspired in
* https://vuex.vuejs.org/en/testing.html
*
* @param {(Function|Object)} action to be tested, or object of named parameters
diff --git a/spec/frontend/__helpers__/vuex_action_helper_spec.js b/spec/frontend/__helpers__/vuex_action_helper_spec.js
index 5bb2b3b26e2..182aea9c1c5 100644
--- a/spec/frontend/__helpers__/vuex_action_helper_spec.js
+++ b/spec/frontend/__helpers__/vuex_action_helper_spec.js
@@ -76,7 +76,7 @@ describe.each([testActionFn, testActionFnWithOptionsArg])(
const promise = testAction(() => {}, null, {}, assertion.mutations, assertion.actions);
- originalExpect(promise instanceof Promise).toBeTruthy();
+ originalExpect(promise instanceof Promise).toBe(true);
return promise;
});
diff --git a/spec/frontend/__helpers__/wait_for_promises.js b/spec/frontend/__helpers__/wait_for_promises.js
index 753c3c5d92b..5a15b8b74b5 100644
--- a/spec/frontend/__helpers__/wait_for_promises.js
+++ b/spec/frontend/__helpers__/wait_for_promises.js
@@ -1,4 +1,2 @@
-export default () =>
- new Promise((resolve) => {
- requestAnimationFrame(resolve);
- });
+// eslint-disable-next-line no-restricted-syntax
+export default () => new Promise(jest.requireActual('timers').setImmediate);
diff --git a/spec/frontend/__helpers__/web_worker_transformer.js b/spec/frontend/__helpers__/web_worker_transformer.js
index 5b2f7d77947..767ab3f5675 100644
--- a/spec/frontend/__helpers__/web_worker_transformer.js
+++ b/spec/frontend/__helpers__/web_worker_transformer.js
@@ -6,7 +6,7 @@ const babelJestTransformer = require('babel-jest');
// [1]: https://webpack.js.org/loaders/worker-loader/
module.exports = {
process: (contentArg, filename, ...args) => {
- const { code: content } = babelJestTransformer.process(contentArg, filename, ...args);
+ const { code: content } = babelJestTransformer.default.process(contentArg, filename, ...args);
return `const { FakeWebWorker } = require("helpers/web_worker_fake");
module.exports = class JestTransformedWorker extends FakeWebWorker {
diff --git a/spec/frontend/__mocks__/monaco-editor/index.js b/spec/frontend/__mocks__/monaco-editor/index.js
index 18b7df32f9b..384f9993150 100644
--- a/spec/frontend/__mocks__/monaco-editor/index.js
+++ b/spec/frontend/__mocks__/monaco-editor/index.js
@@ -15,4 +15,3 @@ jest.mock('monaco-editor/esm/vs/language/typescript/tsMode');
jest.mock('monaco-yaml/lib/esm/yamlMode');
export * from 'monaco-editor/esm/vs/editor/editor.api';
-export default global.monaco;
diff --git a/spec/frontend/access_tokens/components/__snapshots__/expires_at_field_spec.js.snap b/spec/frontend/access_tokens/components/__snapshots__/expires_at_field_spec.js.snap
index 36003154b58..2bd2b17a12d 100644
--- a/spec/frontend/access_tokens/components/__snapshots__/expires_at_field_spec.js.snap
+++ b/spec/frontend/access_tokens/components/__snapshots__/expires_at_field_spec.js.snap
@@ -11,22 +11,17 @@ exports[`~/access_tokens/components/expires_at_field should render datepicker wi
arialabel=""
autocomplete=""
container=""
+ data-qa-selector="expiry_date_field"
+ defaultdate="Wed Aug 05 2020 00:00:00 GMT+0000 (Greenwich Mean Time)"
displayfield="true"
firstday="0"
+ inputid="personal_access_token_expires_at"
inputlabel="Enter date"
+ inputname="personal_access_token[expires_at]"
mindate="Mon Jul 06 2020 00:00:00 GMT+0000 (Greenwich Mean Time)"
placeholder="YYYY-MM-DD"
+ showclearbutton="true"
theme=""
- >
- <gl-form-input-stub
- autocomplete="off"
- class="datepicker gl-datepicker-input"
- data-qa-selector="expiry_date_field"
- id="personal_access_token_expires_at"
- inputmode="none"
- name="personal_access_token[expires_at]"
- placeholder="YYYY-MM-DD"
- />
- </gl-datepicker-stub>
+ />
</gl-form-group-stub>
`;
diff --git a/spec/frontend/access_tokens/components/expires_at_field_spec.js b/spec/frontend/access_tokens/components/expires_at_field_spec.js
index cb899d10ba7..646dc0d703f 100644
--- a/spec/frontend/access_tokens/components/expires_at_field_spec.js
+++ b/spec/frontend/access_tokens/components/expires_at_field_spec.js
@@ -1,6 +1,7 @@
import { shallowMount } from '@vue/test-utils';
import { GlDatepicker } from '@gitlab/ui';
import ExpiresAtField from '~/access_tokens/components/expires_at_field.vue';
+import { getDateInFuture } from '~/lib/utils/datetime_utility';
describe('~/access_tokens/components/expires_at_field', () => {
let wrapper;
@@ -49,4 +50,12 @@ describe('~/access_tokens/components/expires_at_field', () => {
expect(findDatepicker().props('maxDate')).toStrictEqual(maxDate);
});
+
+ it('should set the default expiration date to be 30 days', () => {
+ const today = new Date();
+ const future = getDateInFuture(today, 30);
+ createComponent();
+
+ expect(findDatepicker().props('defaultDate')).toStrictEqual(future);
+ });
});
diff --git a/spec/frontend/access_tokens/components/projects_field_spec.js b/spec/frontend/access_tokens/components/projects_field_spec.js
deleted file mode 100644
index 1c4fe7bb168..00000000000
--- a/spec/frontend/access_tokens/components/projects_field_spec.js
+++ /dev/null
@@ -1,131 +0,0 @@
-import { nextTick } from 'vue';
-import { within, fireEvent } from '@testing-library/dom';
-import { mount } from '@vue/test-utils';
-import ProjectsField from '~/access_tokens/components/projects_field.vue';
-import ProjectsTokenSelector from '~/access_tokens/components/projects_token_selector.vue';
-
-describe('ProjectsField', () => {
- let wrapper;
-
- const createComponent = ({ inputAttrsValue = '' } = {}) => {
- wrapper = mount(ProjectsField, {
- propsData: {
- inputAttrs: {
- id: 'projects',
- name: 'projects',
- value: inputAttrsValue,
- },
- },
- });
- };
-
- const queryByLabelText = (text) => within(wrapper.element).queryByLabelText(text);
- const queryByText = (text) => within(wrapper.element).queryByText(text);
- const findAllProjectsRadio = () => queryByLabelText('All projects');
- const findSelectedProjectsRadio = () => queryByLabelText('Selected projects');
- const findProjectsTokenSelector = () => wrapper.findComponent(ProjectsTokenSelector);
- const findHiddenInput = () => wrapper.find('input[type="hidden"]');
-
- afterEach(() => {
- wrapper.destroy();
- wrapper = null;
- });
-
- it('renders label and sub-label', () => {
- createComponent();
-
- expect(queryByText('Projects')).not.toBe(null);
- expect(queryByText('Set access permissions for this token.')).not.toBe(null);
- });
-
- describe('when `inputAttrs.value` is empty', () => {
- beforeEach(() => {
- createComponent();
- });
-
- it('renders "All projects" radio as checked', () => {
- expect(findAllProjectsRadio().checked).toBe(true);
- });
-
- it('renders "Selected projects" radio as unchecked', () => {
- expect(findSelectedProjectsRadio().checked).toBe(false);
- });
-
- it('sets `projects-token-selector` `initialProjectIds` prop to an empty array', () => {
- expect(findProjectsTokenSelector().props('initialProjectIds')).toEqual([]);
- });
- });
-
- describe('when `inputAttrs.value` is a comma separated list of project IDs', () => {
- beforeEach(() => {
- createComponent({ inputAttrsValue: '1,2' });
- });
-
- it('renders "All projects" radio as unchecked', () => {
- expect(findAllProjectsRadio().checked).toBe(false);
- });
-
- it('renders "Selected projects" radio as checked', () => {
- expect(findSelectedProjectsRadio().checked).toBe(true);
- });
-
- it('sets `projects-token-selector` `initialProjectIds` prop to an array of project IDs', () => {
- expect(findProjectsTokenSelector().props('initialProjectIds')).toEqual(['1', '2']);
- });
- });
-
- it('renders `projects-token-selector` component', () => {
- createComponent();
-
- expect(findProjectsTokenSelector().exists()).toBe(true);
- });
-
- it('renders hidden input with correct `name` and `id` attributes', () => {
- createComponent();
-
- expect(findHiddenInput().attributes()).toEqual(
- expect.objectContaining({
- id: 'projects',
- name: 'projects',
- }),
- );
- });
-
- describe('when `projects-token-selector` is focused', () => {
- beforeEach(() => {
- createComponent();
-
- findProjectsTokenSelector().vm.$emit('focus');
- });
-
- it('auto selects the "Selected projects" radio', () => {
- expect(findSelectedProjectsRadio().checked).toBe(true);
- });
-
- describe('when `projects-token-selector` is changed', () => {
- beforeEach(() => {
- findProjectsTokenSelector().vm.$emit('input', [
- {
- id: 1,
- },
- {
- id: 2,
- },
- ]);
- });
-
- it('updates the hidden input value to a comma separated list of project IDs', () => {
- expect(findHiddenInput().attributes('value')).toBe('1,2');
- });
-
- describe('when radio is changed back to "All projects"', () => {
- it('removes the hidden input value', async () => {
- fireEvent.change(findAllProjectsRadio());
- await nextTick();
-
- expect(findHiddenInput().attributes('value')).toBe('');
- });
- });
- });
- });
-});
diff --git a/spec/frontend/access_tokens/components/projects_token_selector_spec.js b/spec/frontend/access_tokens/components/projects_token_selector_spec.js
deleted file mode 100644
index 40aaf16d41f..00000000000
--- a/spec/frontend/access_tokens/components/projects_token_selector_spec.js
+++ /dev/null
@@ -1,266 +0,0 @@
-import {
- GlAvatar,
- GlAvatarLabeled,
- GlIntersectionObserver,
- GlToken,
- GlTokenSelector,
- GlLoadingIcon,
-} from '@gitlab/ui';
-import { mount } from '@vue/test-utils';
-import produce from 'immer';
-import Vue from 'vue';
-import VueApollo from 'vue-apollo';
-
-import getProjectsQueryResponse from 'test_fixtures/graphql/projects/access_tokens/get_projects.query.graphql.json';
-import createMockApollo from 'helpers/mock_apollo_helper';
-import { extendedWrapper } from 'helpers/vue_test_utils_helper';
-import waitForPromises from 'helpers/wait_for_promises';
-import ProjectsTokenSelector from '~/access_tokens/components/projects_token_selector.vue';
-import getProjectsQuery from '~/access_tokens/graphql/queries/get_projects.query.graphql';
-import { getIdFromGraphQLId } from '~/graphql_shared/utils';
-
-describe('ProjectsTokenSelector', () => {
- const getProjectsQueryResponsePage2 = produce(
- getProjectsQueryResponse,
- (getProjectsQueryResponseDraft) => {
- /* eslint-disable no-param-reassign */
- getProjectsQueryResponseDraft.data.projects.pageInfo.hasNextPage = false;
- getProjectsQueryResponseDraft.data.projects.pageInfo.endCursor = null;
- getProjectsQueryResponseDraft.data.projects.nodes.splice(1, 1);
- getProjectsQueryResponseDraft.data.projects.nodes[0].id = 'gid://gitlab/Project/100';
- /* eslint-enable no-param-reassign */
- },
- );
-
- const runDebounce = () => jest.runAllTimers();
-
- const { pageInfo, nodes: projects } = getProjectsQueryResponse.data.projects;
- const project1 = projects[0];
- const project2 = projects[1];
-
- let wrapper;
-
- let resolveGetProjectsQuery;
- let resolveGetInitialProjectsQuery;
- const getProjectsQueryRequestHandler = jest.fn(
- ({ ids }) =>
- new Promise((resolve) => {
- if (ids) {
- resolveGetInitialProjectsQuery = resolve;
- } else {
- resolveGetProjectsQuery = resolve;
- }
- }),
- );
-
- const createComponent = ({
- propsData = {},
- apolloProvider = createMockApollo([[getProjectsQuery, getProjectsQueryRequestHandler]]),
- resolveQueries = true,
- } = {}) => {
- Vue.use(VueApollo);
-
- wrapper = extendedWrapper(
- mount(ProjectsTokenSelector, {
- apolloProvider,
- propsData: {
- selectedProjects: [],
- initialProjectIds: [],
- ...propsData,
- },
- stubs: ['gl-intersection-observer'],
- }),
- );
-
- runDebounce();
-
- if (resolveQueries) {
- resolveGetProjectsQuery(getProjectsQueryResponse);
-
- return waitForPromises();
- }
-
- return Promise.resolve();
- };
-
- const findTokenSelector = () => wrapper.findComponent(GlTokenSelector);
- const findTokenSelectorInput = () => findTokenSelector().find('input[type="text"]');
- const findIntersectionObserver = () => wrapper.findComponent(GlIntersectionObserver);
-
- it('renders dropdown items with project avatars', async () => {
- await createComponent();
-
- wrapper.findAllComponents(GlAvatarLabeled).wrappers.forEach((avatarLabeledWrapper, index) => {
- const project = projects[index];
-
- expect(avatarLabeledWrapper.attributes()).toEqual(
- expect.objectContaining({
- 'entity-id': `${getIdFromGraphQLId(project.id)}`,
- 'entity-name': project.name,
- ...(project.avatarUrl && { src: project.avatarUrl }),
- }),
- );
-
- expect(avatarLabeledWrapper.props()).toEqual(
- expect.objectContaining({
- label: project.name,
- subLabel: project.nameWithNamespace,
- }),
- );
- });
- });
-
- it('renders tokens with project avatars', () => {
- createComponent({
- propsData: {
- selectedProjects: [{ ...project2, id: getIdFromGraphQLId(project2.id) }],
- },
- });
-
- const token = wrapper.findComponent(GlToken);
- const avatar = token.findComponent(GlAvatar);
-
- expect(token.text()).toContain(project2.nameWithNamespace);
- expect(avatar.attributes('src')).toBe(project2.avatarUrl);
- expect(avatar.props()).toEqual(
- expect.objectContaining({
- entityId: getIdFromGraphQLId(project2.id),
- entityName: project2.name,
- }),
- );
- });
-
- describe('when `enter` key is pressed', () => {
- it('calls `preventDefault` so form is not submitted when user selects a project from the dropdown', () => {
- createComponent();
-
- const event = {
- preventDefault: jest.fn(),
- };
-
- findTokenSelectorInput().trigger('keydown.enter', event);
-
- expect(event.preventDefault).toHaveBeenCalled();
- });
- });
-
- describe('when text input is typed in', () => {
- const searchTerm = 'foo bar';
-
- beforeEach(async () => {
- await createComponent();
-
- await findTokenSelectorInput().setValue(searchTerm);
- runDebounce();
- });
-
- it('makes GraphQL request with `search` variable set', async () => {
- expect(getProjectsQueryRequestHandler).toHaveBeenLastCalledWith({
- search: searchTerm,
- after: null,
- first: 20,
- ids: null,
- });
- });
-
- it('sets loading state while waiting for GraphQL request to resolve', async () => {
- expect(findTokenSelector().props('loading')).toBe(true);
-
- resolveGetProjectsQuery(getProjectsQueryResponse);
- await waitForPromises();
-
- expect(findTokenSelector().props('loading')).toBe(false);
- });
- });
-
- describe('when there is a next page of projects and user scrolls to the bottom of the dropdown', () => {
- beforeEach(async () => {
- await createComponent();
-
- findIntersectionObserver().vm.$emit('appear');
- });
-
- it('makes GraphQL request with `after` variable set', async () => {
- expect(getProjectsQueryRequestHandler).toHaveBeenLastCalledWith({
- after: pageInfo.endCursor,
- first: 20,
- search: '',
- ids: null,
- });
- });
-
- it('displays loading icon while waiting for GraphQL request to resolve', async () => {
- expect(wrapper.findComponent(GlLoadingIcon).exists()).toBe(true);
-
- resolveGetProjectsQuery(getProjectsQueryResponsePage2);
- await waitForPromises();
-
- expect(wrapper.findComponent(GlLoadingIcon).exists()).toBe(false);
- });
- });
-
- describe('when there is not a next page of projects', () => {
- it('does not render `GlIntersectionObserver`', async () => {
- createComponent({ resolveQueries: false });
-
- resolveGetProjectsQuery(getProjectsQueryResponsePage2);
- await waitForPromises();
-
- expect(findIntersectionObserver().exists()).toBe(false);
- });
- });
-
- describe('when `GlTokenSelector` emits `input` event', () => {
- it('emits `input` event used by `v-model`', () => {
- findTokenSelector().vm.$emit('input', project1);
-
- expect(wrapper.emitted('input')[0]).toEqual([project1]);
- });
- });
-
- describe('when `GlTokenSelector` emits `focus` event', () => {
- it('emits `focus` event', () => {
- const event = { fakeEvent: 'foo' };
- findTokenSelector().vm.$emit('focus', event);
-
- expect(wrapper.emitted('focus')[0]).toEqual([event]);
- });
- });
-
- describe('when `initialProjectIds` is an empty array', () => {
- it('does not request initial projects', async () => {
- await createComponent();
-
- expect(getProjectsQueryRequestHandler).toHaveBeenCalledTimes(1);
- expect(getProjectsQueryRequestHandler).toHaveBeenCalledWith(
- expect.objectContaining({
- ids: null,
- }),
- );
- });
- });
-
- describe('when `initialProjectIds` is an array of project IDs', () => {
- it('requests those projects and emits `input` event with result', async () => {
- await createComponent({
- propsData: {
- initialProjectIds: [getIdFromGraphQLId(project1.id), getIdFromGraphQLId(project2.id)],
- },
- });
-
- resolveGetInitialProjectsQuery(getProjectsQueryResponse);
- await waitForPromises();
-
- expect(getProjectsQueryRequestHandler).toHaveBeenCalledWith({
- after: '',
- first: null,
- search: '',
- ids: [project1.id, project2.id],
- });
- expect(wrapper.emitted('input')[0][0]).toEqual([
- { ...project1, id: getIdFromGraphQLId(project1.id) },
- { ...project2, id: getIdFromGraphQLId(project2.id) },
- ]);
- });
- });
-});
diff --git a/spec/frontend/access_tokens/index_spec.js b/spec/frontend/access_tokens/index_spec.js
index b6119f1d167..0c611a4a512 100644
--- a/spec/frontend/access_tokens/index_spec.js
+++ b/spec/frontend/access_tokens/index_spec.js
@@ -8,13 +8,11 @@ import {
initAccessTokenTableApp,
initExpiresAtField,
initNewAccessTokenApp,
- initProjectsField,
initTokensApp,
} from '~/access_tokens';
import * as AccessTokenTableApp from '~/access_tokens/components/access_token_table_app.vue';
-import * as ExpiresAtField from '~/access_tokens/components/expires_at_field.vue';
+import ExpiresAtField from '~/access_tokens/components/expires_at_field.vue';
import * as NewAccessTokenApp from '~/access_tokens/components/new_access_token_app.vue';
-import * as ProjectsField from '~/access_tokens/components/projects_field.vue';
import * as TokensApp from '~/access_tokens/components/tokens_app.vue';
import { FEED_TOKEN, INCOMING_EMAIL_TOKEN, STATIC_OBJECT_TOKEN } from '~/access_tokens/constants';
import { __, sprintf } from '~/locale';
@@ -115,49 +113,28 @@ describe('access tokens', () => {
});
});
- describe.each`
- initFunction | mountSelector | fieldName | expectedComponent
- ${initExpiresAtField} | ${'js-access-tokens-expires-at'} | ${'expiresAt'} | ${ExpiresAtField}
- ${initProjectsField} | ${'js-access-tokens-projects'} | ${'projects'} | ${ProjectsField}
- `('$initFunction', ({ initFunction, mountSelector, fieldName, expectedComponent }) => {
+ describe('initExpiresAtField', () => {
describe('when mount element exists', () => {
- const FakeComponent = Vue.component('FakeComponent', {
- props: ['inputAttrs'],
- render: () => null,
- });
-
- const nameAttribute = `access_tokens[${fieldName}]`;
- const idAttribute = `access_tokens_${fieldName}`;
+ const nameAttribute = 'access_tokens[expires_at]';
+ const idAttribute = 'access_tokens_expires_at';
beforeEach(() => {
- window.gon = { features: { personalAccessTokensScopedToProjects: true } };
-
setHTMLFixture(
- `<div class="${mountSelector}">
+ `<div class="js-access-tokens-expires-at">
<input
- name="${nameAttribute}"
- data-js-name="${fieldName}"
- id="${idAttribute}"
+ name="access_tokens[expires_at]"
+ data-js-name="expiresAt"
+ id="access_tokens_expires_at"
placeholder="Foo bar"
value="1,2"
/>
</div>`,
);
-
- // Mock component so we don't have to deal with mocking Apollo
- // eslint-disable-next-line no-param-reassign
- expectedComponent.default = FakeComponent;
- });
-
- afterEach(() => {
- delete window.gon;
});
it('mounts component and sets `inputAttrs` prop', async () => {
- const vueInstance = await initFunction();
-
- wrapper = createWrapper(vueInstance);
- const component = wrapper.findComponent(FakeComponent);
+ wrapper = createWrapper(initExpiresAtField());
+ const component = wrapper.findComponent(ExpiresAtField);
expect(component.exists()).toBe(true);
expect(component.props('inputAttrs')).toEqual({
@@ -171,7 +148,7 @@ describe('access tokens', () => {
describe('when mount element does not exist', () => {
it('returns `null`', () => {
- expect(initFunction()).toBe(null);
+ expect(initExpiresAtField()).toBe(null);
});
});
});
diff --git a/spec/frontend/add_context_commits_modal/components/add_context_commits_modal_spec.js b/spec/frontend/add_context_commits_modal/components/add_context_commits_modal_spec.js
index 9b93fd26fa0..bffadbde087 100644
--- a/spec/frontend/add_context_commits_modal/components/add_context_commits_modal_spec.js
+++ b/spec/frontend/add_context_commits_modal/components/add_context_commits_modal_spec.js
@@ -87,7 +87,7 @@ describe('AddContextCommitsModal', () => {
it('enabled ok button when atleast one row is selected', async () => {
wrapper.vm.$store.state.selectedCommits = [{ ...commit, isSelected: true }];
await nextTick();
- expect(findModal().attributes('ok-disabled')).toBeFalsy();
+ expect(findModal().attributes('ok-disabled')).toBe(undefined);
});
});
@@ -102,7 +102,7 @@ describe('AddContextCommitsModal', () => {
it('an enabled ok button when atleast one row is selected', async () => {
wrapper.vm.$store.state.selectedCommits = [{ ...commit, isSelected: true }];
await nextTick();
- expect(findModal().attributes('ok-disabled')).toBeFalsy();
+ expect(findModal().attributes('ok-disabled')).toBe(undefined);
});
it('a disabled ok button in first tab, when row is selected in second tab', () => {
diff --git a/spec/frontend/admin/analytics/devops_score/components/devops_score_spec.js b/spec/frontend/admin/analytics/devops_score/components/devops_score_spec.js
index d6c5c5f963a..534af2a3033 100644
--- a/spec/frontend/admin/analytics/devops_score/components/devops_score_spec.js
+++ b/spec/frontend/admin/analytics/devops_score/components/devops_score_spec.js
@@ -129,7 +129,7 @@ describe('DevopsScore', () => {
});
it('displays the correct badge', () => {
- const badge = findUsageCol().find(GlBadge);
+ const badge = findUsageCol().findComponent(GlBadge);
expect(badge.exists()).toBe(true);
expect(badge.props('variant')).toBe('muted');
diff --git a/spec/frontend/admin/signup_restrictions/components/signup_checkbox_spec.js b/spec/frontend/admin/signup_restrictions/components/signup_checkbox_spec.js
index ae9b6f57ee0..eecc21e206b 100644
--- a/spec/frontend/admin/signup_restrictions/components/signup_checkbox_spec.js
+++ b/spec/frontend/admin/signup_restrictions/components/signup_checkbox_spec.js
@@ -24,7 +24,7 @@ describe('Signup Form', () => {
const findByTestId = (id) => wrapper.find(`[data-testid="${id}"]`);
const findHiddenInput = () => findByTestId('input');
- const findCheckbox = () => wrapper.find(GlFormCheckbox);
+ const findCheckbox = () => wrapper.findComponent(GlFormCheckbox);
const findCheckboxLabel = () => findByTestId('label');
const findHelpText = () => findByTestId('helpText');
diff --git a/spec/frontend/admin/signup_restrictions/components/signup_form_spec.js b/spec/frontend/admin/signup_restrictions/components/signup_form_spec.js
index 31a0c2b07e4..411126d0c89 100644
--- a/spec/frontend/admin/signup_restrictions/components/signup_form_spec.js
+++ b/spec/frontend/admin/signup_restrictions/components/signup_form_spec.js
@@ -28,7 +28,7 @@ describe('Signup Form', () => {
const findForm = () => wrapper.findByTestId('form');
const findInputCsrf = () => findForm().find('[name="authenticity_token"]');
- const findFormSubmitButton = () => findForm().find(GlButton);
+ const findFormSubmitButton = () => findForm().findComponent(GlButton);
const findDenyListRawRadio = () => queryByLabelText('Enter denylist manually');
const findDenyListFileRadio = () => queryByLabelText('Upload denylist file');
@@ -36,7 +36,7 @@ describe('Signup Form', () => {
const findDenyListRawInputGroup = () => wrapper.findByTestId('domain-denylist-raw-input-group');
const findDenyListFileInputGroup = () => wrapper.findByTestId('domain-denylist-file-input-group');
const findUserCapInput = () => wrapper.findByTestId('user-cap-input');
- const findModal = () => wrapper.find(GlModal);
+ const findModal = () => wrapper.findComponent(GlModal);
afterEach(() => {
wrapper.destroy();
diff --git a/spec/frontend/admin/statistics_panel/components/app_spec.js b/spec/frontend/admin/statistics_panel/components/app_spec.js
index bac542e72fb..190f0eb94a0 100644
--- a/spec/frontend/admin/statistics_panel/components/app_spec.js
+++ b/spec/frontend/admin/statistics_panel/components/app_spec.js
@@ -41,7 +41,7 @@ describe('Admin statistics app', () => {
store.dispatch('requestStatistics');
createComponent();
- expect(wrapper.find(GlLoadingIcon).exists()).toBe(true);
+ expect(wrapper.findComponent(GlLoadingIcon).exists()).toBe(true);
});
});
diff --git a/spec/frontend/admin/users/components/actions/actions_spec.js b/spec/frontend/admin/users/components/actions/actions_spec.js
index b758c15a91a..4967753b91c 100644
--- a/spec/frontend/admin/users/components/actions/actions_spec.js
+++ b/spec/frontend/admin/users/components/actions/actions_spec.js
@@ -12,7 +12,7 @@ import { paths } from '../../mock_data';
describe('Action components', () => {
let wrapper;
- const findDropdownItem = () => wrapper.find(GlDropdownItem);
+ const findDropdownItem = () => wrapper.findComponent(GlDropdownItem);
const initComponent = ({ component, props } = {}) => {
wrapper = shallowMount(component, {
diff --git a/spec/frontend/admin/users/components/app_spec.js b/spec/frontend/admin/users/components/app_spec.js
index 65b13e3a40d..913732aae42 100644
--- a/spec/frontend/admin/users/components/app_spec.js
+++ b/spec/frontend/admin/users/components/app_spec.js
@@ -28,7 +28,7 @@ describe('AdminUsersApp component', () => {
});
it('renders the admin users table with props', () => {
- expect(wrapper.find(AdminUsersTable).props()).toEqual({
+ expect(wrapper.findComponent(AdminUsersTable).props()).toEqual({
users,
paths,
});
diff --git a/spec/frontend/admin/users/components/modals/delete_user_modal_spec.js b/spec/frontend/admin/users/components/modals/delete_user_modal_spec.js
index 09a345ac826..70ed9eeb3e1 100644
--- a/spec/frontend/admin/users/components/modals/delete_user_modal_spec.js
+++ b/spec/frontend/admin/users/components/modals/delete_user_modal_spec.js
@@ -17,7 +17,7 @@ describe('Delete user modal', () => {
const findButton = (variant, category) =>
wrapper
- .findAll(GlButton)
+ .findAllComponents(GlButton)
.filter((w) => w.attributes('variant') === variant && w.attributes('category') === category)
.at(0);
const findForm = () => wrapper.find('form');
@@ -87,8 +87,8 @@ describe('Delete user modal', () => {
});
it('has disabled buttons', () => {
- expect(findPrimaryButton().attributes('disabled')).toBeTruthy();
- expect(findSecondaryButton().attributes('disabled')).toBeTruthy();
+ expect(findPrimaryButton().attributes('disabled')).toBe('true');
+ expect(findSecondaryButton().attributes('disabled')).toBe('true');
});
});
@@ -105,8 +105,8 @@ describe('Delete user modal', () => {
});
it('has disabled buttons', () => {
- expect(findPrimaryButton().attributes('disabled')).toBeTruthy();
- expect(findSecondaryButton().attributes('disabled')).toBeTruthy();
+ expect(findPrimaryButton().attributes('disabled')).toBe('true');
+ expect(findSecondaryButton().attributes('disabled')).toBe('true');
});
});
@@ -123,8 +123,8 @@ describe('Delete user modal', () => {
});
it('has enabled buttons', () => {
- expect(findPrimaryButton().attributes('disabled')).toBeFalsy();
- expect(findSecondaryButton().attributes('disabled')).toBeFalsy();
+ expect(findPrimaryButton().attributes('disabled')).toBeUndefined();
+ expect(findSecondaryButton().attributes('disabled')).toBeUndefined();
});
describe('when primary action is clicked', () => {
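The assertion changes in the hunk above replace `toBeTruthy()`/`toBeFalsy()` on rendered attributes with exact values. In Vue Test Utils, `wrapper.attributes('disabled')` returns a string when the attribute is rendered and `undefined` when it is absent, so pinning the exact value rules out the false positives a loose truthiness check would allow. A minimal sketch of the pattern, assuming Vue Test Utils 1.x with Jest; the component here is illustrative, not one of the specs above:

```javascript
import { shallowMount } from '@vue/test-utils';

// Throwaway stand-in component for the sketch.
const ExampleButton = {
  props: { disabled: { type: Boolean, default: false } },
  template: '<button :disabled="disabled"><slot></slot></button>',
};

describe('exact attribute assertions', () => {
  it('asserts the rendered string when the attribute is present', () => {
    const wrapper = shallowMount(ExampleButton, { propsData: { disabled: true } });

    // The exact string depends on how the component renders the attribute
    // (a stubbed GlModal exposes 'true', a native button renders 'disabled').
    expect(wrapper.attributes('disabled')).toBe('disabled');
  });

  it('asserts `undefined` when the attribute is absent', () => {
    const wrapper = shallowMount(ExampleButton);

    // An absent attribute comes back as undefined, which is what the specs above now assert.
    expect(wrapper.attributes('disabled')).toBeUndefined();
  });
});
```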
diff --git a/spec/frontend/admin/users/components/user_actions_spec.js b/spec/frontend/admin/users/components/user_actions_spec.js
index e04c43ae3f2..ffc05e744c8 100644
--- a/spec/frontend/admin/users/components/user_actions_spec.js
+++ b/spec/frontend/admin/users/components/user_actions_spec.js
@@ -83,7 +83,7 @@ describe('AdminUserActions component', () => {
});
it.each(CONFIRMATION_ACTIONS)('renders an action component item for "%s"', (action) => {
- const component = wrapper.find(Actions[capitalizeFirstCharacter(action)]);
+ const component = wrapper.findComponent(Actions[capitalizeFirstCharacter(action)]);
expect(component.props('username')).toBe(user.name);
expect(component.props('path')).toBe(userPaths[action]);
@@ -119,7 +119,7 @@ describe('AdminUserActions component', () => {
});
it.each(DELETE_ACTIONS)('renders a delete action component item for "%s"', (action) => {
- const component = wrapper.find(Actions[capitalizeFirstCharacter(action)]);
+ const component = wrapper.findComponent(Actions[capitalizeFirstCharacter(action)]);
expect(component.props('username')).toBe(user.name);
expect(component.props('paths')).toEqual(userPaths);
diff --git a/spec/frontend/admin/users/components/user_avatar_spec.js b/spec/frontend/admin/users/components/user_avatar_spec.js
index 8bbfb89bec1..94fac875fbe 100644
--- a/spec/frontend/admin/users/components/user_avatar_spec.js
+++ b/spec/frontend/admin/users/components/user_avatar_spec.js
@@ -12,10 +12,10 @@ describe('AdminUserAvatar component', () => {
const user = users[0];
const adminUserPath = paths.adminUser;
- const findNote = () => wrapper.find(GlIcon);
- const findAvatar = () => wrapper.find(GlAvatarLabeled);
+ const findNote = () => wrapper.findComponent(GlIcon);
+ const findAvatar = () => wrapper.findComponent(GlAvatarLabeled);
const findUserLink = () => wrapper.find('.js-user-link');
- const findAllBadges = () => wrapper.findAll(GlBadge);
+ const findAllBadges = () => wrapper.findAllComponents(GlBadge);
const findTooltip = () => getBinding(findNote().element, 'gl-tooltip');
const initComponent = (props = {}) => {
diff --git a/spec/frontend/admin/users/components/users_table_spec.js b/spec/frontend/admin/users/components/users_table_spec.js
index ad1c45495b5..fe07f0fce00 100644
--- a/spec/frontend/admin/users/components/users_table_spec.js
+++ b/spec/frontend/admin/users/components/users_table_spec.js
@@ -30,10 +30,10 @@ describe('AdminUsersTable component', () => {
const fetchGroupCountsResponse = createFetchGroupCount([{ id: user.id, groupCount: 5 }]);
const findUserGroupCount = (id) => wrapper.findByTestId(`user-group-count-${id}`);
- const findUserGroupCountLoader = (id) => findUserGroupCount(id).find(GlSkeletonLoader);
+ const findUserGroupCountLoader = (id) => findUserGroupCount(id).findComponent(GlSkeletonLoader);
const getCellByLabel = (trIdx, label) => {
return wrapper
- .find(GlTable)
+ .findComponent(GlTable)
.find('tbody')
.findAll('tr')
.at(trIdx)
@@ -72,7 +72,7 @@ describe('AdminUsersTable component', () => {
});
it('renders the user actions', () => {
- expect(wrapper.find(AdminUserActions).exists()).toBe(true);
+ expect(wrapper.findComponent(AdminUserActions).exists()).toBe(true);
});
it.each`
@@ -81,7 +81,7 @@ describe('AdminUsersTable component', () => {
${AdminUserDate} | ${'Created on'}
${AdminUserDate} | ${'Last activity'}
`('renders the component for column $label', ({ component, label }) => {
- expect(getCellByLabel(0, label).find(component).exists()).toBe(true);
+ expect(getCellByLabel(0, label).findComponent(component).exists()).toBe(true);
});
});
diff --git a/spec/frontend/admin/users/index_spec.js b/spec/frontend/admin/users/index_spec.js
index 961fa96acdd..b51858d5129 100644
--- a/spec/frontend/admin/users/index_spec.js
+++ b/spec/frontend/admin/users/index_spec.js
@@ -8,7 +8,7 @@ describe('initAdminUsersApp', () => {
let wrapper;
let el;
- const findApp = () => wrapper.find(AdminUsersApp);
+ const findApp = () => wrapper.findComponent(AdminUsersApp);
beforeEach(() => {
el = document.createElement('div');
@@ -36,7 +36,7 @@ describe('initAdminUserActions', () => {
let wrapper;
let el;
- const findUserActions = () => wrapper.find(UserActions);
+ const findUserActions = () => wrapper.findComponent(UserActions);
beforeEach(() => {
el = document.createElement('div');
diff --git a/spec/frontend/api/groups_api_spec.js b/spec/frontend/api/groups_api_spec.js
new file mode 100644
index 00000000000..e14ead0b8eb
--- /dev/null
+++ b/spec/frontend/api/groups_api_spec.js
@@ -0,0 +1,46 @@
+import MockAdapter from 'axios-mock-adapter';
+import httpStatus from '~/lib/utils/http_status';
+import axios from '~/lib/utils/axios_utils';
+import { updateGroup } from '~/api/groups_api';
+
+const mockApiVersion = 'v4';
+const mockUrlRoot = '/gitlab';
+
+describe('GroupsApi', () => {
+ let originalGon;
+ let mock;
+
+ const dummyGon = {
+ api_version: mockApiVersion,
+ relative_url_root: mockUrlRoot,
+ };
+
+ beforeEach(() => {
+ mock = new MockAdapter(axios);
+ originalGon = window.gon;
+ window.gon = { ...dummyGon };
+ });
+
+ afterEach(() => {
+ mock.restore();
+ window.gon = originalGon;
+ });
+
+ describe('updateGroup', () => {
+ const mockGroupId = '99';
+ const mockData = { attr: 'value' };
+ const expectedUrl = `${mockUrlRoot}/api/${mockApiVersion}/groups/${mockGroupId}`;
+
+ beforeEach(() => {
+ mock.onPut(expectedUrl).reply(({ data }) => {
+ return [httpStatus.OK, { id: mockGroupId, ...JSON.parse(data) }];
+ });
+ });
+
+ it('updates group', async () => {
+ const res = await updateGroup(mockGroupId, mockData);
+
+ expect(res.data).toMatchObject({ id: mockGroupId, ...mockData });
+ });
+ });
+});
diff --git a/spec/frontend/attention_requests/components/navigation_popover_spec.js b/spec/frontend/attention_requests/components/navigation_popover_spec.js
deleted file mode 100644
index e4d53d5dbdb..00000000000
--- a/spec/frontend/attention_requests/components/navigation_popover_spec.js
+++ /dev/null
@@ -1,88 +0,0 @@
-import { shallowMount } from '@vue/test-utils';
-import { GlPopover, GlButton, GlSprintf, GlIcon } from '@gitlab/ui';
-import { GlBreakpointInstance as bp } from '@gitlab/ui/dist/utils';
-import NavigationPopover from '~/attention_requests/components/navigation_popover.vue';
-import { setHTMLFixture, resetHTMLFixture } from 'helpers/fixtures';
-import { makeMockUserCalloutDismisser } from 'helpers/mock_user_callout_dismisser';
-
-let wrapper;
-let dismiss;
-
-function createComponent(provideData = {}, shouldShowCallout = true) {
- wrapper = shallowMount(NavigationPopover, {
- provide: {
- message: ['Test'],
- observerElSelector: '.js-test',
- observerElToggledClass: 'show',
- featureName: 'attention_requests',
- popoverTarget: '.js-test-popover',
- ...provideData,
- },
- stubs: {
- UserCalloutDismisser: makeMockUserCalloutDismisser({
- dismiss,
- shouldShowCallout,
- }),
- GlSprintf,
- },
- });
-}
-
-describe('Attention requests navigation popover', () => {
- beforeEach(() => {
- setHTMLFixture('<div><div class="js-test-popover"></div><div class="js-test"></div></div>');
- dismiss = jest.fn();
- });
-
- afterEach(() => {
- wrapper.destroy();
- wrapper = null;
- resetHTMLFixture();
- });
-
- it('hides popover if callout is disabled', () => {
- createComponent({}, false);
-
- expect(wrapper.findComponent(GlPopover).exists()).toBe(false);
- });
-
- it('shows popover if callout is enabled', () => {
- createComponent();
-
- expect(wrapper.findComponent(GlPopover).exists()).toBe(true);
- });
-
- it.each`
- isDesktop | device | expectedPlacement
- ${true} | ${'desktop'} | ${'left'}
- ${false} | ${'mobile'} | ${'bottom'}
- `(
- 'sets popover position to $expectedPlacement on $device',
- ({ isDesktop, expectedPlacement }) => {
- jest.spyOn(bp, 'isDesktop').mockReturnValue(isDesktop);
-
- createComponent();
-
- expect(wrapper.findComponent(GlPopover).props('placement')).toBe(expectedPlacement);
- },
- );
-
- it('calls dismiss when clicking action button', () => {
- createComponent();
-
- wrapper
- .findComponent(GlButton)
- .vm.$emit('click', { preventDefault() {}, stopPropagation() {} });
-
- expect(dismiss).toHaveBeenCalled();
- });
-
- it('shows icon in text', () => {
- createComponent({ showAttentionIcon: true, message: ['%{strongStart}Test%{strongEnd}'] });
-
- const icon = wrapper.findComponent(GlIcon);
-
- expect(icon.exists()).toBe(true);
- expect(icon.props('name')).toBe('attention');
- });
-});
diff --git a/spec/frontend/batch_comments/components/review_bar_spec.js b/spec/frontend/batch_comments/components/review_bar_spec.js
index f50db6ab210..f98e0a4c64a 100644
--- a/spec/frontend/batch_comments/components/review_bar_spec.js
+++ b/spec/frontend/batch_comments/components/review_bar_spec.js
@@ -6,6 +6,8 @@ import createStore from '../create_batch_comments_store';
describe('Batch comments review bar component', () => {
let store;
let wrapper;
+ let addEventListenerSpy;
+ let removeEventListenerSpy;
const createComponent = (propsData = {}) => {
store = createStore();
@@ -18,25 +20,58 @@ describe('Batch comments review bar component', () => {
beforeEach(() => {
document.body.className = '';
+
+ addEventListenerSpy = jest.spyOn(window, 'addEventListener');
+ removeEventListenerSpy = jest.spyOn(window, 'removeEventListener');
});
afterEach(() => {
+ addEventListenerSpy.mockRestore();
+ removeEventListenerSpy.mockRestore();
wrapper.destroy();
});
- it('it adds review-bar-visible class to body when review bar is mounted', async () => {
- expect(document.body.classList.contains(REVIEW_BAR_VISIBLE_CLASS_NAME)).toBe(false);
+ describe('when mounted', () => {
+    it('adds review-bar-visible class to body', async () => {
+ expect(document.body.classList.contains(REVIEW_BAR_VISIBLE_CLASS_NAME)).toBe(false);
+
+ createComponent();
+
+ expect(document.body.classList.contains(REVIEW_BAR_VISIBLE_CLASS_NAME)).toBe(true);
+ });
- createComponent();
+    it('adds a blocking handler to the `beforeunload` window event', () => {
+ expect(addEventListenerSpy).not.toBeCalled();
- expect(document.body.classList.contains(REVIEW_BAR_VISIBLE_CLASS_NAME)).toBe(true);
+ createComponent();
+
+ expect(addEventListenerSpy).toHaveBeenCalledTimes(1);
+ expect(addEventListenerSpy).toBeCalledWith('beforeunload', expect.any(Function), {
+ capture: true,
+ });
+ });
});
- it('it removes review-bar-visible class to body when review bar is destroyed', async () => {
- createComponent();
+ describe('before destroyed', () => {
+    it('removes review-bar-visible class from body', async () => {
+ createComponent();
- wrapper.destroy();
+ wrapper.destroy();
- expect(document.body.classList.contains(REVIEW_BAR_VISIBLE_CLASS_NAME)).toBe(false);
+ expect(document.body.classList.contains(REVIEW_BAR_VISIBLE_CLASS_NAME)).toBe(false);
+ });
+
+    it('removes the blocking handler from the `beforeunload` window event', () => {
+ createComponent();
+
+ expect(removeEventListenerSpy).not.toBeCalled();
+
+ wrapper.destroy();
+
+ expect(removeEventListenerSpy).toHaveBeenCalledTimes(1);
+ expect(removeEventListenerSpy).toBeCalledWith('beforeunload', expect.any(Function), {
+ capture: true,
+ });
+ });
});
});
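The review bar spec above now verifies that a blocking `beforeunload` handler is registered with `{ capture: true }` on mount and removed again before destroy. A minimal sketch of that lifecycle pattern, assuming the Vue 2 options API; this is illustrative only, not the actual ReviewBar implementation, and the class name and handler body are assumptions:

```javascript
// Illustrative component: keep a blocking beforeunload handler while mounted.
export default {
  created() {
    this.blockUnload = (event) => {
      // Asking the browser to confirm navigation protects unsaved review comments.
      event.preventDefault();
      event.returnValue = '';
    };
  },
  mounted() {
    document.body.classList.add('review-bar-visible');
    window.addEventListener('beforeunload', this.blockUnload, { capture: true });
  },
  beforeDestroy() {
    document.body.classList.remove('review-bar-visible');
    // The same capture option must be passed so the exact listener is removed.
    window.removeEventListener('beforeunload', this.blockUnload, { capture: true });
  },
  render(h) {
    return h('div', { class: 'review-bar' });
  },
};
```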
diff --git a/spec/frontend/behaviors/components/json_table_spec.js b/spec/frontend/behaviors/components/json_table_spec.js
new file mode 100644
index 00000000000..42b4a051d4d
--- /dev/null
+++ b/spec/frontend/behaviors/components/json_table_spec.js
@@ -0,0 +1,162 @@
+import { GlTable, GlFormInput } from '@gitlab/ui';
+import { nextTick } from 'vue';
+import { merge } from 'lodash';
+import { shallowMountExtended, mountExtended } from 'helpers/vue_test_utils_helper';
+import { stubComponent, RENDER_ALL_SLOTS_TEMPLATE } from 'helpers/stub_component';
+import JSONTable from '~/behaviors/components/json_table.vue';
+
+const TEST_FIELDS = [
+ 'A',
+ {
+ key: 'B',
+ label: 'Second',
+ sortable: true,
+ other: 'foo',
+ },
+ {
+ key: 'C',
+ label: 'Third',
+ },
+ 'D',
+];
+const TEST_ITEMS = [
+ { A: 1, B: 'lorem', C: 2, D: null, E: 'dne' },
+ { A: 2, B: 'ipsum', C: 2, D: null, E: 'dne' },
+ { A: 3, B: 'dolar', C: 2, D: null, E: 'dne' },
+];
+
+describe('behaviors/components/json_table', () => {
+ let wrapper;
+
+ const buildWrapper = ({
+ fields = [],
+ items = [],
+ filter = undefined,
+ caption = undefined,
+ } = {}) => {
+ wrapper = shallowMountExtended(JSONTable, {
+ propsData: {
+ fields,
+ items,
+ hasFilter: filter,
+ caption,
+ },
+ stubs: {
+ GlTable: merge(stubComponent(GlTable), {
+ props: {
+ fields: {
+ type: Array,
+ required: true,
+ },
+ items: {
+ type: Array,
+ required: true,
+ },
+ },
+ template: RENDER_ALL_SLOTS_TEMPLATE,
+ }),
+ },
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ const findTable = () => wrapper.findComponent(GlTable);
+ const findTableCaption = () => wrapper.findByTestId('slot-table-caption');
+ const findFilterInput = () => wrapper.findComponent(GlFormInput);
+
+ describe('default', () => {
+ beforeEach(() => {
+ buildWrapper();
+ });
+
+ it('renders gltable', () => {
+ expect(findTable().props()).toEqual({
+ fields: [],
+ items: [],
+ });
+ expect(findTable().attributes()).toMatchObject({
+ filter: '',
+ 'show-empty': '',
+ });
+ });
+
+ it('does not render filter input', () => {
+ expect(findFilterInput().exists()).toBe(false);
+ });
+
+ it('renders caption', () => {
+ expect(findTableCaption().text()).toBe('Generated with JSON data');
+ });
+ });
+
+ describe('with filter', () => {
+ beforeEach(() => {
+ buildWrapper({
+ filter: true,
+ });
+ });
+
+ it('renders filter input', () => {
+ expect(findFilterInput().attributes()).toMatchObject({
+ value: '',
+ placeholder: 'Type to search',
+ });
+ });
+
+ it('when input is changed, updates table filter', async () => {
+ findFilterInput().vm.$emit('input', 'New value!');
+
+ await nextTick();
+
+ expect(findTable().attributes('filter')).toBe('New value!');
+ });
+ });
+
+ describe('with fields', () => {
+ beforeEach(() => {
+ buildWrapper({
+ fields: TEST_FIELDS,
+ items: TEST_ITEMS,
+ });
+ });
+
+ it('passes cleaned fields and items to table', () => {
+ expect(findTable().props()).toEqual({
+ fields: [
+ 'A',
+ {
+ key: 'B',
+ label: 'Second',
+ sortable: true,
+ },
+ {
+ key: 'C',
+ label: 'Third',
+ sortable: false,
+ },
+ 'D',
+ ],
+ items: TEST_ITEMS,
+ });
+ });
+ });
+
+ describe('with full mount', () => {
+ beforeEach(() => {
+ wrapper = mountExtended(JSONTable, {
+ propsData: {
+ fields: [],
+ items: [],
+ },
+ });
+ });
+
+ // We want to make sure all the props are passed down nicely in integration
+ it('renders table without errors', () => {
+ expect(findTable().exists()).toBe(true);
+ });
+ });
+});
diff --git a/spec/frontend/behaviors/gl_emoji_spec.js b/spec/frontend/behaviors/gl_emoji_spec.js
index 8842ad636ec..722327e94ba 100644
--- a/spec/frontend/behaviors/gl_emoji_spec.js
+++ b/spec/frontend/behaviors/gl_emoji_spec.js
@@ -121,7 +121,7 @@ describe('gl_emoji', () => {
window.gon.emoji_sprites_css_path = testPath;
expect(document.head.querySelector(`link[href="${testPath}"]`)).toBe(null);
- expect(window.gon.emoji_sprites_css_added).toBeFalsy();
+ expect(window.gon.emoji_sprites_css_added).toBe(undefined);
markupToDomElement(
'<gl-emoji data-fallback-sprite-class="emoji-bomb" data-name="bomb"></gl-emoji>',
diff --git a/spec/frontend/behaviors/markdown/render_json_table_spec.js b/spec/frontend/behaviors/markdown/render_json_table_spec.js
new file mode 100644
index 00000000000..488492479f3
--- /dev/null
+++ b/spec/frontend/behaviors/markdown/render_json_table_spec.js
@@ -0,0 +1,119 @@
+import { nextTick } from 'vue';
+import { renderJSONTable } from '~/behaviors/markdown/render_json_table';
+
+describe('behaviors/markdown/render_json_table', () => {
+ let element;
+
+ const TEST_DATA = {
+ fields: [
+ { label: 'Field 1', key: 'a' },
+ { label: 'F 2', key: 'b' },
+ { label: 'F 3', key: 'c' },
+ ],
+ items: [
+ {
+ a: '1',
+ b: 'b',
+ c: 'c',
+ },
+ {
+ a: '2',
+ b: 'd',
+ c: 'e',
+ },
+ ],
+ };
+ const TEST_LABELS = TEST_DATA.fields.map((x) => x.label);
+
+ const tableAsData = (table) => ({
+ head: Array.from(table.querySelectorAll('thead th')).map((td) => td.textContent),
+ body: Array.from(table.querySelectorAll('tbody > tr')).map((tr) =>
+ Array.from(tr.querySelectorAll('td')).map((x) => x.textContent),
+ ),
+ });
+
+ const createTestSubject = async (json) => {
+ if (element) {
+ throw new Error('element has already been initialized');
+ }
+
+ const parent = document.createElement('div');
+ const pre = document.createElement('pre');
+
+ pre.textContent = json;
+ parent.appendChild(pre);
+
+ document.body.appendChild(parent);
+ renderJSONTable([parent]);
+
+ element = parent;
+
+ jest.runAllTimers();
+
+ await nextTick();
+ };
+
+ const findPres = () => document.querySelectorAll('pre');
+ const findTables = () => document.querySelectorAll('table');
+ const findAlerts = () => document.querySelectorAll('.gl-alert');
+ const findInputs = () => document.querySelectorAll('.gl-form-input');
+
+ afterEach(() => {
+ document.body.innerHTML = '';
+ element = null;
+ });
+
+ describe('default', () => {
+ beforeEach(async () => {
+ await createTestSubject(JSON.stringify(TEST_DATA, null, 2));
+ });
+
+ it('removes pre', () => {
+ expect(findPres()).toHaveLength(0);
+ });
+
+ it('replaces pre with table', () => {
+ const tables = findTables();
+
+ expect(tables).toHaveLength(1);
+ expect(tableAsData(tables[0])).toEqual({
+ head: TEST_LABELS,
+ body: [
+ ['1', 'b', 'c'],
+ ['2', 'd', 'e'],
+ ],
+ });
+ });
+
+ it('does not show filter', () => {
+ expect(findInputs()).toHaveLength(0);
+ });
+ });
+
+ describe('with invalid json', () => {
+ beforeEach(() => {
+ createTestSubject('funky but not json');
+ });
+
+ it('preserves pre', () => {
+ expect(findPres()).toHaveLength(1);
+ });
+
+ it('shows alert', () => {
+ const alerts = findAlerts();
+
+ expect(alerts).toHaveLength(1);
+ expect(alerts[0].textContent).toMatchInterpolatedText('Unable to parse JSON');
+ });
+ });
+
+ describe('with filter set', () => {
+ beforeEach(() => {
+ createTestSubject(JSON.stringify({ ...TEST_DATA, filter: true }));
+ });
+
+ it('shows filter', () => {
+ expect(findInputs()).toHaveLength(1);
+ });
+ });
+});
diff --git a/spec/frontend/blob/3d_viewer/mesh_object_spec.js b/spec/frontend/blob/3d_viewer/mesh_object_spec.js
index 60be285039f..3014af073f5 100644
--- a/spec/frontend/blob/3d_viewer/mesh_object_spec.js
+++ b/spec/frontend/blob/3d_viewer/mesh_object_spec.js
@@ -5,7 +5,7 @@ describe('Mesh object', () => {
it('defaults to non-wireframe material', () => {
const object = new MeshObject(new BoxGeometry(10, 10, 10));
- expect(object.material.wireframe).toBeFalsy();
+ expect(object.material.wireframe).toBe(false);
});
it('changes to wirefame material', () => {
@@ -13,7 +13,7 @@ describe('Mesh object', () => {
object.changeMaterial('wireframe');
- expect(object.material.wireframe).toBeTruthy();
+ expect(object.material.wireframe).toBe(true);
});
it('scales object down', () => {
diff --git a/spec/frontend/blob/blob_links_tracking_spec.js b/spec/frontend/blob/blob_links_tracking_spec.js
new file mode 100644
index 00000000000..22e087bc180
--- /dev/null
+++ b/spec/frontend/blob/blob_links_tracking_spec.js
@@ -0,0 +1,60 @@
+import { setHTMLFixture, resetHTMLFixture } from 'helpers/fixtures';
+import addBlobLinksTracking from '~/blob/blob_links_tracking';
+import Tracking from '~/tracking';
+
+describe('Blob links Tracking', () => {
+ const eventName = 'click_link';
+ const label = 'file_line_action';
+
+ const eventsToTrack = [
+ { selector: '.file-line-blame', property: 'blame' },
+ { selector: '.file-line-num', property: 'link' },
+ ];
+
+ const [blameLinkClickEvent, numLinkClickEvent] = eventsToTrack;
+
+ beforeEach(() => {
+ setHTMLFixture(`
+ <div id="blob-content-holder">
+ <div class="line-links diff-line-num">
+ <a href="#L5" class="file-line-blame"></a>
+ <a id="L5" href="#L5" data-line-number="5" class="file-line-num">5</a>
+ </div>
+ <pre id="LC5">Line 5 content</pre>
+ </div>
+ `);
+ addBlobLinksTracking('#blob-content-holder', eventsToTrack);
+ jest.spyOn(Tracking, 'event');
+ });
+
+ afterEach(() => {
+ resetHTMLFixture();
+ });
+
+ it('tracks blame link click event', () => {
+ const blameButton = document.querySelector(blameLinkClickEvent.selector);
+ blameButton.click();
+
+ expect(Tracking.event).toHaveBeenCalledWith(undefined, eventName, {
+ label,
+ property: blameLinkClickEvent.property,
+ });
+ });
+
+ it('tracks num link click event', () => {
+ const numLinkButton = document.querySelector(numLinkClickEvent.selector);
+ numLinkButton.click();
+
+ expect(Tracking.event).toHaveBeenCalledWith(undefined, eventName, {
+ label,
+ property: numLinkClickEvent.property,
+ });
+ });
+
+ it("doesn't fire tracking if the user clicks on any element that is not a link", () => {
+ const codeLine = document.querySelector('#LC5');
+ codeLine.click();
+
+ expect(Tracking.event).not.toHaveBeenCalled();
+ });
+});
diff --git a/spec/frontend/blob/components/blob_content_spec.js b/spec/frontend/blob/components/blob_content_spec.js
index 8450c6b9332..788ee0a86ab 100644
--- a/spec/frontend/blob/components/blob_content_spec.js
+++ b/spec/frontend/blob/components/blob_content_spec.js
@@ -36,20 +36,20 @@ describe('Blob Content component', () => {
describe('rendering', () => {
it('renders loader if `loading: true`', () => {
createComponent({ loading: true });
- expect(wrapper.find(GlLoadingIcon).exists()).toBe(true);
- expect(wrapper.find(BlobContentError).exists()).toBe(false);
- expect(wrapper.find(RichViewer).exists()).toBe(false);
- expect(wrapper.find(SimpleViewer).exists()).toBe(false);
+ expect(wrapper.findComponent(GlLoadingIcon).exists()).toBe(true);
+ expect(wrapper.findComponent(BlobContentError).exists()).toBe(false);
+ expect(wrapper.findComponent(RichViewer).exists()).toBe(false);
+ expect(wrapper.findComponent(SimpleViewer).exists()).toBe(false);
});
it('renders error if there is any in the viewer', () => {
const renderError = 'Oops';
const viewer = { ...SimpleViewerMock, renderError };
createComponent({}, viewer);
- expect(wrapper.find(GlLoadingIcon).exists()).toBe(false);
- expect(wrapper.find(BlobContentError).exists()).toBe(true);
- expect(wrapper.find(RichViewer).exists()).toBe(false);
- expect(wrapper.find(SimpleViewer).exists()).toBe(false);
+ expect(wrapper.findComponent(GlLoadingIcon).exists()).toBe(false);
+ expect(wrapper.findComponent(BlobContentError).exists()).toBe(true);
+ expect(wrapper.findComponent(RichViewer).exists()).toBe(false);
+ expect(wrapper.findComponent(SimpleViewer).exists()).toBe(false);
});
it.each`
@@ -60,7 +60,7 @@ describe('Blob Content component', () => {
'renders $type viewer when activeViewer is $type and no loading or error detected',
({ mock, viewer }) => {
createComponent({}, mock);
- expect(wrapper.find(viewer).exists()).toBe(true);
+ expect(wrapper.findComponent(viewer).exists()).toBe(true);
},
);
@@ -70,13 +70,13 @@ describe('Blob Content component', () => {
${RichBlobContentMock.richData} | ${RichViewerMock} | ${RichViewer}
`('renders correct content that is passed to the component', ({ content, mock, viewer }) => {
createComponent({ content }, mock);
- expect(wrapper.find(viewer).html()).toContain(content);
+ expect(wrapper.findComponent(viewer).html()).toContain(content);
});
});
describe('functionality', () => {
describe('render error', () => {
- const findErrorEl = () => wrapper.find(BlobContentError);
+ const findErrorEl = () => wrapper.findComponent(BlobContentError);
const renderError = BLOB_RENDER_ERRORS.REASONS.COLLAPSED.id;
const viewer = { ...SimpleViewerMock, renderError };
diff --git a/spec/frontend/blob/components/blob_edit_content_spec.js b/spec/frontend/blob/components/blob_edit_content_spec.js
index 9fc2356c018..5017b624292 100644
--- a/spec/frontend/blob/components/blob_edit_content_spec.js
+++ b/spec/frontend/blob/components/blob_edit_content_spec.js
@@ -69,7 +69,7 @@ describe('Blob Header Editing', () => {
});
it('initialises Source Editor', () => {
- const el = wrapper.find({ ref: 'editor' }).element;
+ const el = wrapper.findComponent({ ref: 'editor' }).element;
expect(utils.initSourceEditor).toHaveBeenCalledWith({
el,
blobPath: fileName,
diff --git a/spec/frontend/blob/components/blob_edit_header_spec.js b/spec/frontend/blob/components/blob_edit_header_spec.js
index b1ce0e9a4c5..c84b5896348 100644
--- a/spec/frontend/blob/components/blob_edit_header_spec.js
+++ b/spec/frontend/blob/components/blob_edit_header_spec.js
@@ -16,7 +16,7 @@ describe('Blob Header Editing', () => {
});
};
const findDeleteButton = () =>
- wrapper.findAll(GlButton).wrappers.find((x) => x.text() === 'Delete file');
+ wrapper.findAllComponents(GlButton).wrappers.find((x) => x.text() === 'Delete file');
beforeEach(() => {
createComponent();
@@ -32,7 +32,7 @@ describe('Blob Header Editing', () => {
});
it('contains a form input field', () => {
- expect(wrapper.find(GlFormInput).exists()).toBe(true);
+ expect(wrapper.findComponent(GlFormInput).exists()).toBe(true);
});
it('does not show delete button', () => {
@@ -42,7 +42,7 @@ describe('Blob Header Editing', () => {
describe('functionality', () => {
it('emits input event when the blob name is changed', async () => {
- const inputComponent = wrapper.find(GlFormInput);
+ const inputComponent = wrapper.findComponent(GlFormInput);
const newValue = 'bar.txt';
// setData usage is discouraged. See https://gitlab.com/groups/gitlab-org/-/epics/7330 for details
diff --git a/spec/frontend/blob/components/blob_header_default_actions_spec.js b/spec/frontend/blob/components/blob_header_default_actions_spec.js
index aa538facae2..0f015715dc2 100644
--- a/spec/frontend/blob/components/blob_header_default_actions_spec.js
+++ b/spec/frontend/blob/components/blob_header_default_actions_spec.js
@@ -30,8 +30,8 @@ describe('Blob Header Default Actions', () => {
beforeEach(() => {
createComponent();
- btnGroup = wrapper.find(GlButtonGroup);
- buttons = wrapper.findAll(GlButton);
+ btnGroup = wrapper.findComponent(GlButtonGroup);
+ buttons = wrapper.findAllComponents(GlButton);
});
afterEach(() => {
@@ -69,9 +69,9 @@ describe('Blob Header Default Actions', () => {
createComponent({
activeViewer: RICH_BLOB_VIEWER,
});
- buttons = wrapper.findAll(GlButton);
+ buttons = wrapper.findAllComponents(GlButton);
- expect(buttons.at(0).attributes('disabled')).toBeTruthy();
+ expect(buttons.at(0).attributes('disabled')).toBe('true');
});
it('does not render the copy button if a rendering error is set', () => {
diff --git a/spec/frontend/blob/components/blob_header_filepath_spec.js b/spec/frontend/blob/components/blob_header_filepath_spec.js
index 8220b598ff6..8c32cba1ba4 100644
--- a/spec/frontend/blob/components/blob_header_filepath_spec.js
+++ b/spec/frontend/blob/components/blob_header_filepath_spec.js
@@ -25,7 +25,7 @@ describe('Blob Header Filepath', () => {
wrapper.destroy();
});
- const findBadge = () => wrapper.find(GlBadge);
+ const findBadge = () => wrapper.findComponent(GlBadge);
describe('rendering', () => {
it('matches the snapshot', () => {
@@ -46,7 +46,7 @@ describe('Blob Header Filepath', () => {
it('renders copy-to-clipboard icon that copies path of the Blob', () => {
createComponent();
- const btn = wrapper.find(ClipboardButton);
+ const btn = wrapper.findComponent(ClipboardButton);
expect(btn.exists()).toBe(true);
expect(btn.vm.text).toBe(MockBlob.path);
});
diff --git a/spec/frontend/blob/components/blob_header_spec.js b/spec/frontend/blob/components/blob_header_spec.js
index ee42c2387ae..46740958090 100644
--- a/spec/frontend/blob/components/blob_header_spec.js
+++ b/spec/frontend/blob/components/blob_header_spec.js
@@ -31,7 +31,7 @@ describe('Blob Header Default Actions', () => {
});
describe('rendering', () => {
- const findDefaultActions = () => wrapper.find(DefaultActions);
+ const findDefaultActions = () => wrapper.findComponent(DefaultActions);
const slots = {
prepend: 'Foo Prepend',
@@ -45,17 +45,17 @@ describe('Blob Header Default Actions', () => {
it('renders all components', () => {
createComponent();
- expect(wrapper.find(TableContents).exists()).toBe(true);
- expect(wrapper.find(ViewerSwitcher).exists()).toBe(true);
+ expect(wrapper.findComponent(TableContents).exists()).toBe(true);
+ expect(wrapper.findComponent(ViewerSwitcher).exists()).toBe(true);
expect(findDefaultActions().exists()).toBe(true);
- expect(wrapper.find(BlobFilepath).exists()).toBe(true);
+ expect(wrapper.findComponent(BlobFilepath).exists()).toBe(true);
});
it('does not render viewer switcher if the blob has only the simple viewer', () => {
createComponent({
richViewer: null,
});
- expect(wrapper.find(ViewerSwitcher).exists()).toBe(false);
+ expect(wrapper.findComponent(ViewerSwitcher).exists()).toBe(false);
});
it('does not render viewer switcher if a corresponding prop is passed', () => {
@@ -66,7 +66,7 @@ describe('Blob Header Default Actions', () => {
hideViewerSwitcher: true,
},
);
- expect(wrapper.find(ViewerSwitcher).exists()).toBe(false);
+ expect(wrapper.findComponent(ViewerSwitcher).exists()).toBe(false);
});
it('does not render default actions is corresponding prop is passed', () => {
@@ -77,7 +77,7 @@ describe('Blob Header Default Actions', () => {
hideDefaultActions: true,
},
);
- expect(wrapper.find(DefaultActions).exists()).toBe(false);
+ expect(wrapper.findComponent(DefaultActions).exists()).toBe(false);
});
Object.keys(slots).forEach((slot) => {
diff --git a/spec/frontend/blob/components/blob_header_viewer_switcher_spec.js b/spec/frontend/blob/components/blob_header_viewer_switcher_spec.js
index 91baaf3ea69..1eac0733646 100644
--- a/spec/frontend/blob/components/blob_header_viewer_switcher_spec.js
+++ b/spec/frontend/blob/components/blob_header_viewer_switcher_spec.js
@@ -35,8 +35,8 @@ describe('Blob Header Viewer Switcher', () => {
beforeEach(() => {
createComponent();
- btnGroup = wrapper.find(GlButtonGroup);
- buttons = wrapper.findAll(GlButton);
+ btnGroup = wrapper.findComponent(GlButtonGroup);
+ buttons = wrapper.findAllComponents(GlButton);
});
it('renders gl-button-group component', () => {
@@ -58,7 +58,7 @@ describe('Blob Header Viewer Switcher', () => {
function factory(propsData = {}) {
createComponent(propsData);
- buttons = wrapper.findAll(GlButton);
+ buttons = wrapper.findAllComponents(GlButton);
simpleBtn = buttons.at(0);
richBtn = buttons.at(1);
diff --git a/spec/frontend/blob/notebook/notebook_viever_spec.js b/spec/frontend/blob/notebook/notebook_viever_spec.js
index 93406db2675..ea4badc03fb 100644
--- a/spec/frontend/blob/notebook/notebook_viever_spec.js
+++ b/spec/frontend/blob/notebook/notebook_viever_spec.js
@@ -31,10 +31,10 @@ describe('iPython notebook renderer', () => {
wrapper = shallowMount(component, { propsData: { endpoint, relativeRawPath } });
};
- const findLoading = () => wrapper.find(GlLoadingIcon);
- const findNotebookLab = () => wrapper.find(NotebookLab);
- const findLoadErrorMessage = () => wrapper.find({ ref: 'loadErrorMessage' });
- const findParseErrorMessage = () => wrapper.find({ ref: 'parsingErrorMessage' });
+ const findLoading = () => wrapper.findComponent(GlLoadingIcon);
+ const findNotebookLab = () => wrapper.findComponent(NotebookLab);
+ const findLoadErrorMessage = () => wrapper.findComponent({ ref: 'loadErrorMessage' });
+ const findParseErrorMessage = () => wrapper.findComponent({ ref: 'parsingErrorMessage' });
beforeEach(() => {
mock = new MockAdapter(axios);
diff --git a/spec/frontend/blob/pdf/pdf_viewer_spec.js b/spec/frontend/blob/pdf/pdf_viewer_spec.js
index e332ea49fa6..23227df6357 100644
--- a/spec/frontend/blob/pdf/pdf_viewer_spec.js
+++ b/spec/frontend/blob/pdf/pdf_viewer_spec.js
@@ -18,9 +18,9 @@ describe('PDF renderer', () => {
});
};
- const findLoading = () => wrapper.find(GlLoadingIcon);
- const findPdfLab = () => wrapper.find(PdfLab);
- const findLoadError = () => wrapper.find({ ref: 'loadError' });
+ const findLoading = () => wrapper.findComponent(GlLoadingIcon);
+ const findPdfLab = () => wrapper.findComponent(PdfLab);
+ const findLoadError = () => wrapper.findComponent({ ref: 'loadError' });
beforeEach(() => {
mountComponent();
diff --git a/spec/frontend/blob/pipeline_tour_success_modal_spec.js b/spec/frontend/blob/pipeline_tour_success_modal_spec.js
index 750dd8f0a72..81b38cfc278 100644
--- a/spec/frontend/blob/pipeline_tour_success_modal_spec.js
+++ b/spec/frontend/blob/pipeline_tour_success_modal_spec.js
@@ -52,7 +52,7 @@ describe('PipelineTourSuccessModal', () => {
});
it('renders the path from the commit cookie for back to the merge request button', () => {
- const goToMrBtn = wrapper.find({ ref: 'goToMergeRequest' });
+ const goToMrBtn = wrapper.findComponent({ ref: 'goToMergeRequest' });
expect(goToMrBtn.attributes('href')).toBe(expectedMrPath);
});
@@ -67,16 +67,16 @@ describe('PipelineTourSuccessModal', () => {
});
it('renders the path from projectMergeRequestsPath for back to the merge request button', () => {
- const goToMrBtn = wrapper.find({ ref: 'goToMergeRequest' });
+ const goToMrBtn = wrapper.findComponent({ ref: 'goToMergeRequest' });
expect(goToMrBtn.attributes('href')).toBe(expectedMrPath);
});
});
it('has expected structure', () => {
- const modal = wrapper.find(GlModal);
- const sprintf = modal.find(GlSprintf);
- const emoji = modal.find(GlEmoji);
+ const modal = wrapper.findComponent(GlModal);
+ const sprintf = modal.findComponent(GlSprintf);
+ const emoji = modal.findComponent(GlEmoji);
expect(wrapper.text()).toContain("That's it, well done!");
expect(sprintf.exists()).toBe(true);
@@ -84,7 +84,7 @@ describe('PipelineTourSuccessModal', () => {
});
it('renders the link for codeQualityLink', () => {
- expect(wrapper.find(GlLink).attributes('href')).toBe('/code-quality-link');
+ expect(wrapper.findComponent(GlLink).attributes('href')).toBe('/code-quality-link');
});
it('calls to remove cookie', () => {
@@ -103,7 +103,7 @@ describe('PipelineTourSuccessModal', () => {
it('send an event when go to pipelines is clicked', () => {
trackingSpy = mockTracking('_category_', wrapper.element, jest.spyOn);
- const goToBtn = wrapper.find({ ref: 'goToPipelines' });
+ const goToBtn = wrapper.findComponent({ ref: 'goToPipelines' });
triggerEvent(goToBtn.element);
expect(trackingSpy).toHaveBeenCalledWith('_category_', 'click_button', {
@@ -115,7 +115,7 @@ describe('PipelineTourSuccessModal', () => {
it('sends an event when back to the merge request is clicked', () => {
trackingSpy = mockTracking('_category_', wrapper.element, jest.spyOn);
- const goToBtn = wrapper.find({ ref: 'goToMergeRequest' });
+ const goToBtn = wrapper.findComponent({ ref: 'goToMergeRequest' });
triggerEvent(goToBtn.element);
expect(trackingSpy).toHaveBeenCalledWith('_category_', 'click_button', {
diff --git a/spec/frontend/blob/sketch/index_spec.js b/spec/frontend/blob/sketch/index_spec.js
index 5e1922a24f4..e8d1f724c4b 100644
--- a/spec/frontend/blob/sketch/index_spec.js
+++ b/spec/frontend/blob/sketch/index_spec.js
@@ -69,7 +69,7 @@ describe('Sketch viewer', () => {
const img = document.querySelector('#js-sketch-viewer img');
expect(img).not.toBeNull();
- expect(img.classList.contains('img-fluid')).toBeTruthy();
+ expect(img.classList.contains('img-fluid')).toBe(true);
});
it('renders link to image', () => {
diff --git a/spec/frontend/blob/suggest_gitlab_ci_yml/components/popover_spec.js b/spec/frontend/blob/suggest_gitlab_ci_yml/components/popover_spec.js
index 7e13994f2b7..6b329dc078a 100644
--- a/spec/frontend/blob/suggest_gitlab_ci_yml/components/popover_spec.js
+++ b/spec/frontend/blob/suggest_gitlab_ci_yml/components/popover_spec.js
@@ -98,7 +98,7 @@ describe('Suggest gitlab-ci.yml Popover', () => {
const expectedAction = 'click_button';
const expectedProperty = 'owner';
const expectedValue = '10';
- const dismissButton = wrapper.find(GlButton);
+ const dismissButton = wrapper.findComponent(GlButton);
trackingSpy = mockTracking('_category_', wrapper.element, jest.spyOn);
triggerEvent(dismissButton.element);
diff --git a/spec/frontend/boards/board_card_inner_spec.js b/spec/frontend/boards/board_card_inner_spec.js
index c6de3ee69f3..985902b4a3b 100644
--- a/spec/frontend/boards/board_card_inner_spec.js
+++ b/spec/frontend/boards/board_card_inner_spec.js
@@ -238,7 +238,7 @@ describe('Board card component', () => {
});
it('renders assignee', () => {
- expect(wrapper.find('.board-card-assignee .avatar').exists()).toBe(true);
+ expect(wrapper.find('.board-card-assignee .gl-avatar').exists()).toBe(true);
});
it('sets title', () => {
@@ -336,7 +336,7 @@ describe('Board card component', () => {
});
it('renders all three assignees', () => {
- expect(wrapper.findAll('.board-card-assignee .avatar').length).toEqual(3);
+ expect(wrapper.findAll('.board-card-assignee .gl-avatar').length).toEqual(3);
});
describe('more than three assignees', () => {
@@ -362,7 +362,7 @@ describe('Board card component', () => {
});
it('renders two assignees', () => {
- expect(wrapper.findAll('.board-card-assignee .avatar').length).toEqual(2);
+ expect(wrapper.findAll('.board-card-assignee .gl-avatar').length).toEqual(2);
});
it('renders 99+ avatar counter', async () => {
diff --git a/spec/frontend/boards/board_list_spec.js b/spec/frontend/boards/board_list_spec.js
index fd9d2b6823d..9b0c0b93ffb 100644
--- a/spec/frontend/boards/board_list_spec.js
+++ b/spec/frontend/boards/board_list_spec.js
@@ -56,7 +56,7 @@ describe('Board list component', () => {
});
it('renders issues', () => {
- expect(wrapper.findAll(BoardCard).length).toBe(1);
+ expect(wrapper.findAllComponents(BoardCard).length).toBe(1);
});
it('sets data attribute with issue id', () => {
diff --git a/spec/frontend/boards/components/board_add_new_column_form_spec.js b/spec/frontend/boards/components/board_add_new_column_form_spec.js
index 3b26ca57d6f..0b3c6cb24c4 100644
--- a/spec/frontend/boards/components/board_add_new_column_form_spec.js
+++ b/spec/frontend/boards/components/board_add_new_column_form_spec.js
@@ -60,8 +60,8 @@ describe('Board card layout', () => {
});
const formTitle = () => wrapper.findByTestId('board-add-column-form-title').text();
- const findSearchInput = () => wrapper.find(GlSearchBoxByType);
- const findSearchLabel = () => wrapper.find(GlFormGroup);
+ const findSearchInput = () => wrapper.findComponent(GlSearchBoxByType);
+ const findSearchLabelFormGroup = () => wrapper.findComponent(GlFormGroup);
const cancelButton = () => wrapper.findByTestId('cancelAddNewColumn');
const submitButton = () => wrapper.findByTestId('addNewColumnButton');
const findDropdown = () => wrapper.findComponent(GlDropdown);
@@ -121,10 +121,17 @@ describe('Board card layout', () => {
mountComponent(props);
- expect(findSearchLabel().attributes('label')).toEqual(props.searchLabel);
+ expect(findSearchLabelFormGroup().attributes('label')).toEqual(props.searchLabel);
expect(findSearchInput().attributes('placeholder')).toEqual(props.searchPlaceholder);
});
+ it('does not show the dropdown as invalid by default', () => {
+ mountComponent();
+
+ expect(findSearchLabelFormGroup().attributes('state')).toBe('true');
+ expect(findDropdown().props('toggleClass')).not.toContain('gl-inset-border-1-red-400!');
+ });
+
it('emits filter event on input', () => {
mountComponent();
@@ -137,13 +144,13 @@ describe('Board card layout', () => {
});
describe('Add list button', () => {
- it('is disabled if no item is selected', () => {
+ it('is enabled by default', () => {
mountComponent();
- expect(submitButton().props('disabled')).toBe(true);
+ expect(submitButton().props('disabled')).toBe(false);
});
- it('emits add-list event on click', () => {
+ it('emits add-list event on click when an ID is selected', () => {
mountComponent({
selectedId: mockLabelList.label.id,
});
@@ -152,5 +159,16 @@ describe('Board card layout', () => {
expect(wrapper.emitted('add-list')).toEqual([[]]);
});
+
+ it('does not emit the add-list event on click and shows the dropdown as invalid when no ID is selected', async () => {
+ mountComponent();
+
+ await submitButton().vm.$emit('click');
+
+ expect(findSearchLabelFormGroup().attributes('state')).toBeUndefined();
+ expect(findDropdown().props('toggleClass')).toContain('gl-inset-border-1-red-400!');
+
+ expect(wrapper.emitted('add-list')).toBeUndefined();
+ });
});
});
diff --git a/spec/frontend/boards/components/board_add_new_column_trigger_spec.js b/spec/frontend/boards/components/board_add_new_column_trigger_spec.js
index 7dd02bf1d35..354eb7bff16 100644
--- a/spec/frontend/boards/components/board_add_new_column_trigger_spec.js
+++ b/spec/frontend/boards/components/board_add_new_column_trigger_spec.js
@@ -39,7 +39,7 @@ describe('BoardAddNewColumnTrigger', () => {
});
it('renders an enabled button', () => {
- const button = wrapper.find(GlButton);
+ const button = wrapper.findComponent(GlButton);
expect(button.props('disabled')).toBe(false);
});
@@ -47,7 +47,7 @@ describe('BoardAddNewColumnTrigger', () => {
describe('when button is disabled', () => {
it('shows the tooltip', async () => {
- wrapper.find(GlButton).vm.$emit('click');
+ wrapper.findComponent(GlButton).vm.$emit('click');
await nextTick();
diff --git a/spec/frontend/boards/components/board_blocked_icon_spec.js b/spec/frontend/boards/components/board_blocked_icon_spec.js
index 7a5c49bd488..cf4ba07da16 100644
--- a/spec/frontend/boards/components/board_blocked_icon_spec.js
+++ b/spec/frontend/boards/components/board_blocked_icon_spec.js
@@ -23,9 +23,9 @@ describe('BoardBlockedIcon', () => {
let wrapper;
let mockApollo;
- const findGlIcon = () => wrapper.find(GlIcon);
- const findGlPopover = () => wrapper.find(GlPopover);
- const findGlLink = () => wrapper.find(GlLink);
+ const findGlIcon = () => wrapper.findComponent(GlIcon);
+ const findGlPopover = () => wrapper.findComponent(GlPopover);
+ const findGlLink = () => wrapper.findComponent(GlLink);
const findPopoverTitle = () => wrapper.findByTestId('popover-title');
const findIssuableTitle = () => wrapper.findByTestId('issuable-title');
const findHiddenBlockingCount = () => wrapper.findByTestId('hidden-blocking-count');
@@ -114,7 +114,7 @@ describe('BoardBlockedIcon', () => {
it('should display a loading spinner while loading', () => {
createWrapper({ loading: true });
- expect(wrapper.find(GlLoadingIcon).exists()).toBe(true);
+ expect(wrapper.findComponent(GlLoadingIcon).exists()).toBe(true);
});
it('should not query for blocking issuables by default', async () => {
diff --git a/spec/frontend/boards/components/board_card_spec.js b/spec/frontend/boards/components/board_card_spec.js
index 17a5383a31e..bb1e63a581e 100644
--- a/spec/frontend/boards/components/board_card_spec.js
+++ b/spec/frontend/boards/components/board_card_spec.js
@@ -88,7 +88,7 @@ describe('Board card', () => {
createStore({ initialState: { isShowingLabels: true } });
mountComponent({ mountFn: mount, stubs: {} });
- wrapper.find(GlLabel).trigger('mouseup');
+ wrapper.findComponent(GlLabel).trigger('mouseup');
expect(mockActions.toggleBoardItem).toHaveBeenCalledTimes(0);
});
diff --git a/spec/frontend/boards/components/board_content_sidebar_spec.js b/spec/frontend/boards/components/board_content_sidebar_spec.js
index 368c7d561f8..7e35c39cd48 100644
--- a/spec/frontend/boards/components/board_content_sidebar_spec.js
+++ b/spec/frontend/boards/components/board_content_sidebar_spec.js
@@ -108,7 +108,7 @@ describe('BoardContentSidebar', () => {
createStore({ mockGetters: { isSidebarOpen: () => false } });
createComponent();
- expect(wrapper.findComponent(GlDrawer).exists()).toBe(false);
+ expect(wrapper.findComponent(GlDrawer).props('open')).toBe(false);
});
it('applies an open attribute', () => {
diff --git a/spec/frontend/boards/components/board_content_spec.js b/spec/frontend/boards/components/board_content_spec.js
index f535679b8a0..97d9e08f5d4 100644
--- a/spec/frontend/boards/components/board_content_spec.js
+++ b/spec/frontend/boards/components/board_content_spec.js
@@ -67,12 +67,12 @@ describe('BoardContent', () => {
});
it('renders BoardContentSidebar', () => {
- expect(wrapper.find(BoardContentSidebar).exists()).toBe(true);
+ expect(wrapper.findComponent(BoardContentSidebar).exists()).toBe(true);
});
it('does not display EpicsSwimlanes component', () => {
- expect(wrapper.find(EpicsSwimlanes).exists()).toBe(false);
- expect(wrapper.find(GlAlert).exists()).toBe(false);
+ expect(wrapper.findComponent(EpicsSwimlanes).exists()).toBe(false);
+ expect(wrapper.findComponent(GlAlert).exists()).toBe(false);
});
});
@@ -82,7 +82,7 @@ describe('BoardContent', () => {
});
it('does not render BoardContentSidebar', () => {
- expect(wrapper.find(BoardContentSidebar).exists()).toBe(false);
+ expect(wrapper.findComponent(BoardContentSidebar).exists()).toBe(false);
});
});
@@ -92,7 +92,7 @@ describe('BoardContent', () => {
});
it('renders draggable component', () => {
- expect(wrapper.find(Draggable).exists()).toBe(true);
+ expect(wrapper.findComponent(Draggable).exists()).toBe(true);
});
});
@@ -102,7 +102,7 @@ describe('BoardContent', () => {
});
it('does not render draggable component', () => {
- expect(wrapper.find(Draggable).exists()).toBe(false);
+ expect(wrapper.findComponent(Draggable).exists()).toBe(false);
});
});
});
diff --git a/spec/frontend/boards/components/board_list_header_spec.js b/spec/frontend/boards/components/board_list_header_spec.js
index 2f9677680eb..50901f3fe84 100644
--- a/spec/frontend/boards/components/board_list_header_spec.js
+++ b/spec/frontend/boards/components/board_list_header_spec.js
@@ -83,7 +83,7 @@ describe('Board List Header Component', () => {
const isCollapsed = () => wrapper.vm.list.collapsed;
- const findAddIssueButton = () => wrapper.find({ ref: 'newIssueBtn' });
+ const findAddIssueButton = () => wrapper.findComponent({ ref: 'newIssueBtn' });
const findTitle = () => wrapper.find('.board-title');
const findCaret = () => wrapper.findByTestId('board-title-caret');
diff --git a/spec/frontend/boards/components/board_new_item_spec.js b/spec/frontend/boards/components/board_new_item_spec.js
index 86cebc8a719..f4e9901aad2 100644
--- a/spec/frontend/boards/components/board_new_item_spec.js
+++ b/spec/frontend/boards/components/board_new_item_spec.js
@@ -44,7 +44,7 @@ describe('BoardNewItem', () => {
it('finds an enabled create button', async () => {
expect(wrapper.findByTestId('create-button').props('disabled')).toBe(true);
- wrapper.find(GlFormInput).vm.$emit('input', 'hello');
+ wrapper.findComponent(GlFormInput).vm.$emit('input', 'hello');
await nextTick();
expect(wrapper.findByTestId('create-button').props('disabled')).toBe(false);
@@ -53,7 +53,7 @@ describe('BoardNewItem', () => {
describe('when the user types in a string with only spaces', () => {
it('disables the Create Issue button', async () => {
- wrapper.find(GlFormInput).vm.$emit('input', ' ');
+ wrapper.findComponent(GlFormInput).vm.$emit('input', ' ');
await nextTick();
@@ -93,7 +93,7 @@ describe('BoardNewItem', () => {
titleInput().setValue('Foo');
await glForm().trigger('submit');
- expect(wrapper.emitted('form-submit')).toBeTruthy();
+ expect(wrapper.emitted('form-submit')).toHaveLength(1);
expect(wrapper.emitted('form-submit')[0]).toEqual([
{
title: 'Foo',
@@ -131,7 +131,7 @@ describe('BoardNewItem', () => {
await glForm().trigger('reset');
expect(titleInput().element.value).toBe('');
- expect(wrapper.emitted('form-cancel')).toBeTruthy();
+ expect(wrapper.emitted('form-cancel')).toHaveLength(1);
});
});
});
diff --git a/spec/frontend/boards/components/board_settings_sidebar_spec.js b/spec/frontend/boards/components/board_settings_sidebar_spec.js
index 7f40c426b30..4171a6236de 100644
--- a/spec/frontend/boards/components/board_settings_sidebar_spec.js
+++ b/spec/frontend/boards/components/board_settings_sidebar_spec.js
@@ -57,10 +57,10 @@ describe('BoardSettingsSidebar', () => {
}),
);
};
- const findLabel = () => wrapper.find(GlLabel);
- const findDrawer = () => wrapper.find(GlDrawer);
- const findModal = () => wrapper.find(GlModal);
- const findRemoveButton = () => wrapper.find(GlButton);
+ const findLabel = () => wrapper.findComponent(GlLabel);
+ const findDrawer = () => wrapper.findComponent(GlDrawer);
+ const findModal = () => wrapper.findComponent(GlModal);
+ const findRemoveButton = () => wrapper.findComponent(GlButton);
afterEach(() => {
jest.restoreAllMocks();
@@ -71,7 +71,7 @@ describe('BoardSettingsSidebar', () => {
it('finds a MountingPortal component', () => {
createComponent();
- expect(wrapper.find(MountingPortal).props()).toMatchObject({
+ expect(wrapper.findComponent(MountingPortal).props()).toMatchObject({
mountTo: '#js-right-sidebar-portal',
append: true,
name: 'board-settings-sidebar',
@@ -93,7 +93,7 @@ describe('BoardSettingsSidebar', () => {
await nextTick();
- expect(wrapper.find(GlDrawer).exists()).toBe(false);
+ expect(wrapper.findComponent(GlDrawer).props('open')).toBe(false);
});
it('closes the sidebar when emitting the correct event', async () => {
@@ -103,7 +103,7 @@ describe('BoardSettingsSidebar', () => {
await nextTick();
- expect(wrapper.find(GlDrawer).exists()).toBe(false);
+ expect(wrapper.findComponent(GlDrawer).props('open')).toBe(false);
});
});
@@ -150,7 +150,7 @@ describe('BoardSettingsSidebar', () => {
it('does not render GlDrawer', () => {
createComponent({ sidebarType: '' });
- expect(findDrawer().exists()).toBe(false);
+ expect(findDrawer().props('open')).toBe(false);
});
});
diff --git a/spec/frontend/boards/components/boards_selector_spec.js b/spec/frontend/boards/components/boards_selector_spec.js
index d91e81fe4d0..f3be66db36f 100644
--- a/spec/frontend/boards/components/boards_selector_spec.js
+++ b/spec/frontend/boards/components/boards_selector_spec.js
@@ -53,7 +53,7 @@ describe('BoardsSelector', () => {
};
const fillSearchBox = (filterTerm) => {
- const searchBox = wrapper.find({ ref: 'searchBox' });
+ const searchBox = wrapper.findComponent({ ref: 'searchBox' });
const searchBoxInput = searchBox.find('input');
searchBoxInput.setValue(filterTerm);
searchBoxInput.trigger('input');
diff --git a/spec/frontend/boards/components/new_board_button_spec.js b/spec/frontend/boards/components/new_board_button_spec.js
index 075fe225ec2..2bbd3797abf 100644
--- a/spec/frontend/boards/components/new_board_button_spec.js
+++ b/spec/frontend/boards/components/new_board_button_spec.js
@@ -53,13 +53,13 @@ describe('NewBoardButton', () => {
it('renders nothing when `canAdminBoard` is `false`', () => {
wrapper = createComponent({ canAdminBoard: false });
- expect(wrapper.find(GlButton).exists()).toBe(false);
+ expect(wrapper.findComponent(GlButton).exists()).toBe(false);
});
it('renders nothing when `multipleIssueBoardsAvailable` is `false`', () => {
wrapper = createComponent({ multipleIssueBoardsAvailable: false });
- expect(wrapper.find(GlButton).exists()).toBe(false);
+ expect(wrapper.findComponent(GlButton).exists()).toBe(false);
});
it('emits `showBoardModal` when button is clicked', () => {
@@ -67,7 +67,7 @@ describe('NewBoardButton', () => {
wrapper = createComponent();
- wrapper.find(GlButton).vm.$emit('click', { preventDefault: () => {} });
+ wrapper.findComponent(GlButton).vm.$emit('click', { preventDefault: () => {} });
expect(eventHub.$emit).toHaveBeenCalledWith('showBoardModal', 'new');
});
diff --git a/spec/frontend/boards/components/sidebar/board_editable_item_spec.js b/spec/frontend/boards/components/sidebar/board_editable_item_spec.js
index 0c76c711b3a..5e2222ac3d7 100644
--- a/spec/frontend/boards/components/sidebar/board_editable_item_spec.js
+++ b/spec/frontend/boards/components/sidebar/board_editable_item_spec.js
@@ -6,7 +6,7 @@ import BoardSidebarItem from '~/boards/components/sidebar/board_editable_item.vu
describe('boards sidebar remove issue', () => {
let wrapper;
- const findLoader = () => wrapper.find(GlLoadingIcon);
+ const findLoader = () => wrapper.findComponent(GlLoadingIcon);
const findEditButton = () => wrapper.find('[data-testid="edit-button"]');
const findTitle = () => wrapper.find('[data-testid="title"]');
const findCollapsed = () => wrapper.find('[data-testid="collapsed-content"]');
diff --git a/spec/frontend/boards/components/sidebar/board_sidebar_time_tracker_spec.js b/spec/frontend/boards/components/sidebar/board_sidebar_time_tracker_spec.js
index 7c8996be0b8..5c435643425 100644
--- a/spec/frontend/boards/components/sidebar/board_sidebar_time_tracker_spec.js
+++ b/spec/frontend/boards/components/sidebar/board_sidebar_time_tracker_spec.js
@@ -47,7 +47,7 @@ describe('BoardSidebarTimeTracker', () => {
(timeTrackingLimitToHours) => {
createComponent({ provide: { timeTrackingLimitToHours } });
- expect(wrapper.find(IssuableTimeTracker).props()).toEqual({
+ expect(wrapper.findComponent(IssuableTimeTracker).props()).toEqual({
limitToHours: timeTrackingLimitToHours,
showCollapsed: false,
issuableId: '1',
diff --git a/spec/frontend/boards/components/sidebar/board_sidebar_title_spec.js b/spec/frontend/boards/components/sidebar/board_sidebar_title_spec.js
index 5364d929c38..cc1e5de15c1 100644
--- a/spec/frontend/boards/components/sidebar/board_sidebar_title_spec.js
+++ b/spec/frontend/boards/components/sidebar/board_sidebar_title_spec.js
@@ -46,10 +46,10 @@ describe('~/boards/components/sidebar/board_sidebar_title.vue', () => {
});
};
- const findForm = () => wrapper.find(GlForm);
- const findAlert = () => wrapper.find(GlAlert);
- const findFormInput = () => wrapper.find(GlFormInput);
- const findEditableItem = () => wrapper.find(BoardEditableItem);
+ const findForm = () => wrapper.findComponent(GlForm);
+ const findAlert = () => wrapper.findComponent(GlAlert);
+ const findFormInput = () => wrapper.findComponent(GlFormInput);
+ const findEditableItem = () => wrapper.findComponent(BoardEditableItem);
const findCancelButton = () => wrapper.find('[data-testid="cancel-button"]');
const findTitle = () => wrapper.find('[data-testid="item-title"]');
const findCollapsed = () => wrapper.find('[data-testid="collapsed-content"]');
diff --git a/spec/frontend/boards/project_select_spec.js b/spec/frontend/boards/project_select_spec.js
index c45cd545155..7ff34ffdf9e 100644
--- a/spec/frontend/boards/project_select_spec.js
+++ b/spec/frontend/boards/project_select_spec.js
@@ -10,7 +10,6 @@ import Vue, { nextTick } from 'vue';
import Vuex from 'vuex';
import ProjectSelect from '~/boards/components/project_select.vue';
import defaultState from '~/boards/stores/state';
-import waitForPromises from 'helpers/wait_for_promises';
import { mockList, mockActiveGroupProjects } from './mock_data';
@@ -23,9 +22,9 @@ describe('ProjectSelect component', () => {
const findLabel = () => wrapper.find("[data-testid='header-label']");
const findGlDropdown = () => wrapper.findComponent(GlDropdown);
const findGlDropdownLoadingIcon = () =>
- findGlDropdown().find('button:first-child').find(GlLoadingIcon);
- const findGlSearchBoxByType = () => wrapper.find(GlSearchBoxByType);
- const findGlDropdownItems = () => wrapper.findAll(GlDropdownItem);
+ findGlDropdown().find('button:first-child').findComponent(GlLoadingIcon);
+ const findGlSearchBoxByType = () => wrapper.findComponent(GlSearchBoxByType);
+ const findGlDropdownItems = () => wrapper.findAllComponents(GlDropdownItem);
const findFirstGlDropdownItem = () => findGlDropdownItems().at(0);
const findInMenuLoadingIcon = () => wrapper.find("[data-testid='dropdown-text-loading-icon']");
const findEmptySearchMessage = () => wrapper.find("[data-testid='empty-result-message']");
@@ -133,7 +132,7 @@ describe('ProjectSelect component', () => {
const dropdownToggle = findGlDropdown().find('.dropdown-toggle');
await dropdownToggle.trigger('click');
- await waitForPromises();
+ jest.runOnlyPendingTimers();
await nextTick();
const searchInput = findGlDropdown().findComponent(GlFormInput).element;
diff --git a/spec/frontend/boards/stores/mutations_spec.js b/spec/frontend/boards/stores/mutations_spec.js
index 7d79993a0ee..1606ca09d8f 100644
--- a/spec/frontend/boards/stores/mutations_spec.js
+++ b/spec/frontend/boards/stores/mutations_spec.js
@@ -518,17 +518,6 @@ describe('Board Store Mutations', () => {
expect(state.boardItemsByListId[payload.listId]).toEqual(listState);
});
-
- it("updates the list's items count", () => {
- expect(state.boardLists['gid://gitlab/List/1'].issuesCount).toBe(1);
-
- mutations.ADD_BOARD_ITEM_TO_LIST(state, {
- itemId: mockIssue2.id,
- listId: mockList.id,
- });
-
- expect(state.boardLists['gid://gitlab/List/1'].issuesCount).toBe(2);
- });
});
describe('REMOVE_BOARD_ITEM_FROM_LIST', () => {
@@ -536,8 +525,7 @@ describe('Board Store Mutations', () => {
setBoardsListsState();
});
- it("removes an item from a list and updates the list's items count", () => {
- expect(state.boardLists['gid://gitlab/List/1'].issuesCount).toBe(1);
+ it('removes an item from a list', () => {
expect(state.boardItemsByListId['gid://gitlab/List/1']).toContain(mockIssue.id);
mutations.REMOVE_BOARD_ITEM_FROM_LIST(state, {
@@ -546,7 +534,6 @@ describe('Board Store Mutations', () => {
});
expect(state.boardItemsByListId['gid://gitlab/List/1']).not.toContain(mockIssue.id);
- expect(state.boardLists['gid://gitlab/List/1'].issuesCount).toBe(0);
});
});
diff --git a/spec/frontend/captcha/wait_for_captcha_to_be_solved_spec.js b/spec/frontend/captcha/wait_for_captcha_to_be_solved_spec.js
index 08d031a4fa7..2263d2bbeed 100644
--- a/spec/frontend/captcha/wait_for_captcha_to_be_solved_spec.js
+++ b/spec/frontend/captcha/wait_for_captcha_to_be_solved_spec.js
@@ -1,3 +1,4 @@
+import { nextTick } from 'vue';
import CaptchaModal from '~/captcha/captcha_modal.vue';
import { waitForCaptchaToBeSolved } from '~/captcha/wait_for_captcha_to_be_solved';
@@ -15,7 +16,7 @@ describe('waitForCaptchaToBeSolved', () => {
it('opens a modal, resolves with captcha response on success', async () => {
CaptchaModal.mounted.mockImplementationOnce(function mounted() {
- requestAnimationFrame(() => {
+ return nextTick().then(() => {
this.$emit('receivedCaptchaResponse', response);
this.$emit('hidden');
});
@@ -36,7 +37,7 @@ describe('waitForCaptchaToBeSolved', () => {
it("opens a modal, rejects with error in case the captcha isn't solved", async () => {
CaptchaModal.mounted.mockImplementationOnce(function mounted() {
- requestAnimationFrame(() => {
+ return nextTick().then(() => {
this.$emit('receivedCaptchaResponse', null);
this.$emit('hidden');
});
diff --git a/spec/frontend/ci_variable_list/components/ci_admin_variables_spec.js b/spec/frontend/ci_variable_list/components/ci_admin_variables_spec.js
new file mode 100644
index 00000000000..920ceaefb70
--- /dev/null
+++ b/spec/frontend/ci_variable_list/components/ci_admin_variables_spec.js
@@ -0,0 +1,178 @@
+import Vue, { nextTick } from 'vue';
+import VueApollo from 'vue-apollo';
+import { GlLoadingIcon, GlTable } from '@gitlab/ui';
+import { shallowMount } from '@vue/test-utils';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import waitForPromises from 'helpers/wait_for_promises';
+import createFlash from '~/flash';
+import { resolvers } from '~/ci_variable_list/graphql/resolvers';
+
+import ciAdminVariables from '~/ci_variable_list/components/ci_admin_variables.vue';
+import ciVariableSettings from '~/ci_variable_list/components/ci_variable_settings.vue';
+import ciVariableTable from '~/ci_variable_list/components/ci_variable_table.vue';
+import getAdminVariables from '~/ci_variable_list/graphql/queries/variables.query.graphql';
+
+import addAdminVariable from '~/ci_variable_list/graphql/mutations/admin_add_variable.mutation.graphql';
+import deleteAdminVariable from '~/ci_variable_list/graphql/mutations/admin_delete_variable.mutation.graphql';
+import updateAdminVariable from '~/ci_variable_list/graphql/mutations/admin_update_variable.mutation.graphql';
+
+import { genericMutationErrorText, variableFetchErrorText } from '~/ci_variable_list/constants';
+
+import { mockAdminVariables, newVariable } from '../mocks';
+
+jest.mock('~/flash');
+
+Vue.use(VueApollo);
+
+const mockProvide = {
+ endpoint: '/variables',
+};
+
+describe('Ci Admin Variable list', () => {
+ let wrapper;
+
+ let mockApollo;
+ let mockVariables;
+
+ const findLoadingIcon = () => wrapper.findComponent(GlLoadingIcon);
+ const findCiTable = () => wrapper.findComponent(GlTable);
+ const findCiSettings = () => wrapper.findComponent(ciVariableSettings);
+
+ // eslint-disable-next-line consistent-return
+ const createComponentWithApollo = async ({ isLoading = false } = {}) => {
+ const handlers = [[getAdminVariables, mockVariables]];
+
+ mockApollo = createMockApollo(handlers, resolvers);
+
+ wrapper = shallowMount(ciAdminVariables, {
+ provide: mockProvide,
+ apolloProvider: mockApollo,
+ stubs: { ciVariableSettings, ciVariableTable },
+ });
+
+ if (!isLoading) {
+ return waitForPromises();
+ }
+ };
+
+ beforeEach(() => {
+ mockVariables = jest.fn();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('while queries are being fetched', () => {
+ beforeEach(() => {
+ createComponentWithApollo({ isLoading: true });
+ });
+
+ it('shows a loading icon', () => {
+ expect(findLoadingIcon().exists()).toBe(true);
+ expect(findCiTable().exists()).toBe(false);
+ });
+ });
+
+ describe('when queries are resolved', () => {
+ describe('successfully', () => {
+ beforeEach(async () => {
+ mockVariables.mockResolvedValue(mockAdminVariables);
+
+ await createComponentWithApollo();
+ });
+
+ it('passes down the expected environments as props', () => {
+ expect(findCiSettings().props('environments')).toEqual([]);
+ });
+
+ it('passes down the expected variables as props', () => {
+ expect(findCiSettings().props('variables')).toEqual(
+ mockAdminVariables.data.ciVariables.nodes,
+ );
+ });
+
+ it('does not call createFlash', () => {
+ expect(createFlash).not.toHaveBeenCalled();
+ });
+ });
+
+ describe('with an error for variables', () => {
+ beforeEach(async () => {
+ mockVariables.mockRejectedValue();
+
+ await createComponentWithApollo();
+ });
+
+ it('calls createFlash with the expected error message', () => {
+ expect(createFlash).toHaveBeenCalledWith({ message: variableFetchErrorText });
+ });
+ });
+ });
+
+ describe('mutations', () => {
+ beforeEach(async () => {
+ mockVariables.mockResolvedValue(mockAdminVariables);
+
+ await createComponentWithApollo();
+ });
+ it.each`
+ actionName | mutation | event
+ ${'add'} | ${addAdminVariable} | ${'add-variable'}
+ ${'update'} | ${updateAdminVariable} | ${'update-variable'}
+ ${'delete'} | ${deleteAdminVariable} | ${'delete-variable'}
+ `(
+ 'calls the right mutation when the user performs the $actionName variable action',
+ async ({ event, mutation }) => {
+ jest.spyOn(wrapper.vm.$apollo, 'mutate').mockResolvedValue();
+ await findCiSettings().vm.$emit(event, newVariable);
+
+ expect(wrapper.vm.$apollo.mutate).toHaveBeenCalledWith({
+ mutation,
+ variables: {
+ endpoint: mockProvide.endpoint,
+ variable: newVariable,
+ },
+ });
+ },
+ );
+
+ it.each`
+ actionName | event | mutationName
+ ${'add'} | ${'add-variable'} | ${'addAdminVariable'}
+ ${'update'} | ${'update-variable'} | ${'updateAdminVariable'}
+ ${'delete'} | ${'delete-variable'} | ${'deleteAdminVariable'}
+ `(
+ 'throws the specific GraphQL error, if present, when the user performs the $actionName variable action',
+ async ({ event, mutationName }) => {
+ const graphQLErrorMessage = 'There is a problem with this graphQL action';
+ jest
+ .spyOn(wrapper.vm.$apollo, 'mutate')
+ .mockResolvedValue({ data: { [mutationName]: { errors: [graphQLErrorMessage] } } });
+ await findCiSettings().vm.$emit(event, newVariable);
+ await nextTick();
+
+ expect(wrapper.vm.$apollo.mutate).toHaveBeenCalled();
+ expect(createFlash).toHaveBeenCalledWith({ message: graphQLErrorMessage });
+ },
+ );
+
+ it.each`
+ actionName | event
+ ${'add'} | ${'add-variable'}
+ ${'update'} | ${'update-variable'}
+ ${'delete'} | ${'delete-variable'}
+ `(
+ 'throws a generic error when the mutation fails without GraphQL errors and the user performs the $actionName variable action',
+ async ({ event }) => {
+ jest.spyOn(wrapper.vm.$apollo, 'mutate').mockImplementationOnce(() => {
+ throw new Error();
+ });
+ await findCiSettings().vm.$emit(event, newVariable);
+
+ expect(wrapper.vm.$apollo.mutate).toHaveBeenCalled();
+ expect(createFlash).toHaveBeenCalledWith({ message: genericMutationErrorText });
+ },
+ );
+ });
+});
diff --git a/spec/frontend/ci_variable_list/components/ci_environments_dropdown_spec.js b/spec/frontend/ci_variable_list/components/ci_environments_dropdown_spec.js
new file mode 100644
index 00000000000..e9966576cab
--- /dev/null
+++ b/spec/frontend/ci_variable_list/components/ci_environments_dropdown_spec.js
@@ -0,0 +1,139 @@
+import { GlDropdown, GlDropdownItem, GlIcon, GlSearchBoxByType } from '@gitlab/ui';
+import { mount } from '@vue/test-utils';
+import { nextTick } from 'vue';
+import { allEnvironments } from '~/ci_variable_list/constants';
+import CiEnvironmentsDropdown from '~/ci_variable_list/components/ci_environments_dropdown.vue';
+
+describe('Ci environments dropdown', () => {
+ let wrapper;
+
+ const envs = ['dev', 'prod', 'staging'];
+ const defaultProps = { environments: envs, selectedEnvironmentScope: '' };
+
+ const findDropdownText = () => wrapper.findComponent(GlDropdown).text();
+ const findAllDropdownItems = () => wrapper.findAllComponents(GlDropdownItem);
+ const findDropdownItemByIndex = (index) => wrapper.findAllComponents(GlDropdownItem).at(index);
+ const findActiveIconByIndex = (index) => findDropdownItemByIndex(index).findComponent(GlIcon);
+ const findSearchBox = () => wrapper.findComponent(GlSearchBoxByType);
+
+ const createComponent = ({ props = {}, searchTerm = '' } = {}) => {
+ wrapper = mount(CiEnvironmentsDropdown, {
+ propsData: {
+ ...defaultProps,
+ ...props,
+ },
+ });
+
+ findSearchBox().vm.$emit('input', searchTerm);
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('No environments found', () => {
+ beforeEach(() => {
+ createComponent({ searchTerm: 'stable' });
+ });
+
+ it('renders create button with search term if environments do not contain search term', () => {
+ expect(findAllDropdownItems()).toHaveLength(2);
+ expect(findDropdownItemByIndex(1).text()).toBe('Create wildcard: stable');
+ });
+
+ it('renders empty results message', () => {
+ expect(findDropdownItemByIndex(0).text()).toBe('No matching results');
+ });
+ });
+
+ describe('Search term is empty', () => {
+ beforeEach(() => {
+ createComponent({ props: { environments: envs } });
+ });
+
+ it('renders all environments when search term is empty', () => {
+ expect(findAllDropdownItems()).toHaveLength(3);
+ expect(findDropdownItemByIndex(0).text()).toBe(envs[0]);
+ expect(findDropdownItemByIndex(1).text()).toBe(envs[1]);
+ expect(findDropdownItemByIndex(2).text()).toBe(envs[2]);
+ });
+
+ it('should not display an active checkmark on an inactive environment', () => {
+ expect(findActiveIconByIndex(0).classes('gl-visibility-hidden')).toBe(true);
+ });
+ });
+
+ describe('when `*` is the value of selectedEnvironmentScope props', () => {
+ const wildcardScope = '*';
+
+ beforeEach(() => {
+ createComponent({ props: { selectedEnvironmentScope: wildcardScope } });
+ });
+
+ it('shows the `All environments` text and not the wildcard', () => {
+ expect(findDropdownText()).toContain(allEnvironments.text);
+ expect(findDropdownText()).not.toContain(wildcardScope);
+ });
+ });
+
+ describe('Environments found', () => {
+ const currentEnv = envs[2];
+
+ beforeEach(async () => {
+ createComponent({ searchTerm: currentEnv });
+ await nextTick();
+ });
+
+ it('renders only the environment searched for', () => {
+ expect(findAllDropdownItems()).toHaveLength(1);
+ expect(findDropdownItemByIndex(0).text()).toBe(currentEnv);
+ });
+
+ it('should not display create button', () => {
+ const environments = findAllDropdownItems().filter((env) => env.text().startsWith('Create'));
+ expect(environments).toHaveLength(0);
+ expect(findAllDropdownItems()).toHaveLength(1);
+ });
+
+ it('should not display empty results message', () => {
+ expect(wrapper.findComponent({ ref: 'noMatchingResults' }).exists()).toBe(false);
+ });
+
+ it('should clear the search term when showing the dropdown', () => {
+ wrapper.findComponent(GlDropdown).trigger('click');
+
+ expect(findSearchBox().text()).toBe('');
+ });
+
+ describe('Custom events', () => {
+ describe('when clicking on an environment', () => {
+ const itemIndex = 0;
+
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('should emit `select-environment` if an environment is clicked', async () => {
+ await nextTick();
+
+ await findDropdownItemByIndex(itemIndex).vm.$emit('click');
+
+ expect(wrapper.emitted('select-environment')).toEqual([[envs[itemIndex]]]);
+ });
+ });
+
+ describe('when creating a new environment from a search term', () => {
+ const search = 'new-env';
+ beforeEach(() => {
+ createComponent({ searchTerm: search });
+ });
+
+ it('should emit `create-environment-scope` when the create item is clicked', async () => {
+ await nextTick();
+ findDropdownItemByIndex(1).vm.$emit('click');
+ expect(wrapper.emitted('create-environment-scope')).toEqual([[search]]);
+ });
+ });
+ });
+ });
+});
diff --git a/spec/frontend/ci_variable_list/components/ci_group_variables_spec.js b/spec/frontend/ci_variable_list/components/ci_group_variables_spec.js
new file mode 100644
index 00000000000..e45656acfd8
--- /dev/null
+++ b/spec/frontend/ci_variable_list/components/ci_group_variables_spec.js
@@ -0,0 +1,183 @@
+import Vue, { nextTick } from 'vue';
+import VueApollo from 'vue-apollo';
+import { GlLoadingIcon, GlTable } from '@gitlab/ui';
+import { shallowMount } from '@vue/test-utils';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import waitForPromises from 'helpers/wait_for_promises';
+import createFlash from '~/flash';
+import { resolvers } from '~/ci_variable_list/graphql/resolvers';
+import { convertToGraphQLId } from '~/graphql_shared/utils';
+
+import ciGroupVariables from '~/ci_variable_list/components/ci_group_variables.vue';
+import ciVariableSettings from '~/ci_variable_list/components/ci_variable_settings.vue';
+import ciVariableTable from '~/ci_variable_list/components/ci_variable_table.vue';
+import getGroupVariables from '~/ci_variable_list/graphql/queries/group_variables.query.graphql';
+
+import addGroupVariable from '~/ci_variable_list/graphql/mutations/group_add_variable.mutation.graphql';
+import deleteGroupVariable from '~/ci_variable_list/graphql/mutations/group_delete_variable.mutation.graphql';
+import updateGroupVariable from '~/ci_variable_list/graphql/mutations/group_update_variable.mutation.graphql';
+
+import { genericMutationErrorText, variableFetchErrorText } from '~/ci_variable_list/constants';
+
+import { mockGroupVariables, newVariable } from '../mocks';
+
+jest.mock('~/flash');
+
+Vue.use(VueApollo);
+
+const mockProvide = {
+ endpoint: '/variables',
+ groupPath: '/namespace/group',
+ groupId: 1,
+};
+
+describe('Ci Group Variable list', () => {
+ let wrapper;
+
+ let mockApollo;
+ let mockVariables;
+
+ const findLoadingIcon = () => wrapper.findComponent(GlLoadingIcon);
+ const findCiTable = () => wrapper.findComponent(GlTable);
+ const findCiSettings = () => wrapper.findComponent(ciVariableSettings);
+
+ // eslint-disable-next-line consistent-return
+ const createComponentWithApollo = async ({ isLoading = false } = {}) => {
+ const handlers = [[getGroupVariables, mockVariables]];
+
+ mockApollo = createMockApollo(handlers, resolvers);
+
+ wrapper = shallowMount(ciGroupVariables, {
+ provide: mockProvide,
+ apolloProvider: mockApollo,
+ stubs: { ciVariableSettings, ciVariableTable },
+ });
+
+ if (!isLoading) {
+ return waitForPromises();
+ }
+ };
+
+ beforeEach(() => {
+ mockVariables = jest.fn();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('while queries are being fetched', () => {
+ beforeEach(() => {
+ createComponentWithApollo({ isLoading: true });
+ });
+
+ it('shows a loading icon', () => {
+ expect(findLoadingIcon().exists()).toBe(true);
+ expect(findCiTable().exists()).toBe(false);
+ });
+ });
+
+ describe('when queries are resolved', () => {
+ describe('successfully', () => {
+ beforeEach(async () => {
+ mockVariables.mockResolvedValue(mockGroupVariables);
+
+ await createComponentWithApollo();
+ });
+
+ it('passes down the expected environments as props', () => {
+ expect(findCiSettings().props('environments')).toEqual([]);
+ });
+
+ it('passes down the expected variables as props', () => {
+ expect(findCiSettings().props('variables')).toEqual(
+ mockGroupVariables.data.group.ciVariables.nodes,
+ );
+ });
+
+ it('does not call createFlash', () => {
+ expect(createFlash).not.toHaveBeenCalled();
+ });
+ });
+
+ describe('with an error for variables', () => {
+ beforeEach(async () => {
+ mockVariables.mockRejectedValue();
+
+ await createComponentWithApollo();
+ });
+
+ it('calls createFlash with the expected error message', () => {
+ expect(createFlash).toHaveBeenCalledWith({ message: variableFetchErrorText });
+ });
+ });
+ });
+
+ describe('mutations', () => {
+ beforeEach(async () => {
+ mockVariables.mockResolvedValue(mockGroupVariables);
+
+ await createComponentWithApollo();
+ });
+ it.each`
+ actionName | mutation | event
+ ${'add'} | ${addGroupVariable} | ${'add-variable'}
+ ${'update'} | ${updateGroupVariable} | ${'update-variable'}
+ ${'delete'} | ${deleteGroupVariable} | ${'delete-variable'}
+ `(
+ 'calls the right mutation when the user performs the $actionName variable action',
+ async ({ event, mutation }) => {
+ jest.spyOn(wrapper.vm.$apollo, 'mutate').mockResolvedValue();
+ await findCiSettings().vm.$emit(event, newVariable);
+
+ expect(wrapper.vm.$apollo.mutate).toHaveBeenCalledWith({
+ mutation,
+ variables: {
+ endpoint: mockProvide.endpoint,
+ fullPath: mockProvide.groupPath,
+ groupId: convertToGraphQLId('Group', mockProvide.groupId),
+ variable: newVariable,
+ },
+ });
+ },
+ );
+
+ it.each`
+ actionName | event | mutationName
+ ${'add'} | ${'add-variable'} | ${'addGroupVariable'}
+ ${'update'} | ${'update-variable'} | ${'updateGroupVariable'}
+ ${'delete'} | ${'delete-variable'} | ${'deleteGroupVariable'}
+ `(
+ 'throws the specific GraphQL error, if present, when the user performs the $actionName variable action',
+ async ({ event, mutationName }) => {
+ const graphQLErrorMessage = 'There is a problem with this graphQL action';
+ jest
+ .spyOn(wrapper.vm.$apollo, 'mutate')
+ .mockResolvedValue({ data: { [mutationName]: { errors: [graphQLErrorMessage] } } });
+ await findCiSettings().vm.$emit(event, newVariable);
+ await nextTick();
+
+ expect(wrapper.vm.$apollo.mutate).toHaveBeenCalled();
+ expect(createFlash).toHaveBeenCalledWith({ message: graphQLErrorMessage });
+ },
+ );
+
+ it.each`
+ actionName | event
+ ${'add'} | ${'add-variable'}
+ ${'update'} | ${'update-variable'}
+ ${'delete'} | ${'delete-variable'}
+ `(
+ 'throws a generic error when the mutation fails without GraphQL errors and the user performs the $actionName variable action',
+ async ({ event }) => {
+ jest.spyOn(wrapper.vm.$apollo, 'mutate').mockImplementationOnce(() => {
+ throw new Error();
+ });
+ await findCiSettings().vm.$emit(event, newVariable);
+
+ expect(wrapper.vm.$apollo.mutate).toHaveBeenCalled();
+ expect(createFlash).toHaveBeenCalledWith({ message: genericMutationErrorText });
+ },
+ );
+ });
+});
diff --git a/spec/frontend/ci_variable_list/components/ci_variable_modal_spec.js b/spec/frontend/ci_variable_list/components/ci_variable_modal_spec.js
new file mode 100644
index 00000000000..e5019e3261e
--- /dev/null
+++ b/spec/frontend/ci_variable_list/components/ci_variable_modal_spec.js
@@ -0,0 +1,383 @@
+import { GlButton, GlFormInput } from '@gitlab/ui';
+import { mockTracking } from 'helpers/tracking_helper';
+import { shallowMountExtended, mountExtended } from 'helpers/vue_test_utils_helper';
+import CiEnvironmentsDropdown from '~/ci_variable_list/components/ci_environments_dropdown.vue';
+import CiVariableModal from '~/ci_variable_list/components/ci_variable_modal.vue';
+import {
+ ADD_VARIABLE_ACTION,
+ AWS_ACCESS_KEY_ID,
+ EDIT_VARIABLE_ACTION,
+ EVENT_LABEL,
+ EVENT_ACTION,
+ ENVIRONMENT_SCOPE_LINK_TITLE,
+ instanceString,
+} from '~/ci_variable_list/constants';
+import { mockVariablesWithScopes } from '../mocks';
+import ModalStub from '../stubs';
+
+describe('Ci variable modal', () => {
+ let wrapper;
+ let trackingSpy;
+
+ const maskableRegex = '^[a-zA-Z0-9_+=/@:.~-]{8,}$';
+ const mockVariables = mockVariablesWithScopes(instanceString);
+
+ const defaultProvide = {
+ awsLogoSvgPath: '/logo',
+ awsTipCommandsLink: '/tips',
+ awsTipDeployLink: '/deploy',
+ awsTipLearnLink: '/learn-link',
+ containsVariableReferenceLink: '/reference',
+ environmentScopeLink: '/help/environments',
+ isProtectedByDefault: false,
+ maskedEnvironmentVariablesLink: '/variables-link',
+ maskableRegex,
+ protectedEnvironmentVariablesLink: '/protected-link',
+ };
+
+ const defaultProps = {
+ areScopedVariablesAvailable: true,
+ environments: [],
+ mode: ADD_VARIABLE_ACTION,
+ selectedVariable: {},
+ variable: [],
+ };
+
+ const createComponent = ({ mountFn = shallowMountExtended, props = {}, provide = {} } = {}) => {
+ wrapper = mountFn(CiVariableModal, {
+ attachTo: document.body,
+ provide: { ...defaultProvide, ...provide },
+ propsData: {
+ ...defaultProps,
+ ...props,
+ },
+ stubs: {
+ GlModal: ModalStub,
+ },
+ });
+ };
+
+ const findCiEnvironmentsDropdown = () => wrapper.findComponent(CiEnvironmentsDropdown);
+ const findReferenceWarning = () => wrapper.findByTestId('contains-variable-reference');
+ const findModal = () => wrapper.findComponent(ModalStub);
+ const findAWSTip = () => wrapper.findByTestId('aws-guidance-tip');
+ const findAddorUpdateButton = () => wrapper.findByTestId('ciUpdateOrAddVariableBtn');
+ const deleteVariableButton = () =>
+ findModal()
+ .findAllComponents(GlButton)
+ .wrappers.find((button) => button.props('variant') === 'danger');
+ const findProtectedVariableCheckbox = () =>
+ wrapper.findByTestId('ci-variable-protected-checkbox');
+ const findMaskedVariableCheckbox = () => wrapper.findByTestId('ci-variable-masked-checkbox');
+ const findValueField = () => wrapper.find('#ci-variable-value');
+ const findEnvScopeLink = () => wrapper.findByTestId('environment-scope-link');
+ const findEnvScopeInput = () => wrapper.findByTestId('environment-scope').findComponent(GlFormInput);
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('Adding a variable', () => {
+ describe('when no key/value pair is present', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('shows the submit button as disabled', () => {
+ expect(findAddorUpdateButton().attributes('disabled')).toBe('true');
+ });
+ });
+
+ describe('when a key/value pair is present', () => {
+ beforeEach(() => {
+ createComponent({ props: { selectedVariable: mockVariables[0] } });
+ });
+
+ it('shows the submit button as enabled', () => {
+ expect(findAddorUpdateButton().attributes('disabled')).toBeUndefined();
+ });
+ });
+
+ describe('events', () => {
+ const [currentVariable] = mockVariables;
+
+ beforeEach(() => {
+ createComponent({ props: { selectedVariable: currentVariable } });
+ jest.spyOn(wrapper.vm, '$emit');
+ });
+
+ it('Dispatches `add-variable` action on submit', () => {
+ findAddorUpdateButton().vm.$emit('click');
+ expect(wrapper.emitted('add-variable')).toEqual([[currentVariable]]);
+ });
+
+ it('Dispatches the `hideModal` event when dismissing', () => {
+ findModal().vm.$emit('hidden');
+ expect(wrapper.emitted('hideModal')).toEqual([[]]);
+ });
+ });
+ });
+
+ describe('when protected by default', () => {
+ describe('when adding a new variable', () => {
+ beforeEach(() => {
+ createComponent({ provide: { isProtectedByDefault: true } });
+ findModal().vm.$emit('shown');
+ });
+
+ it('updates the protected value to true', () => {
+ expect(findProtectedVariableCheckbox().attributes('data-is-protected-checked')).toBe(
+ 'true',
+ );
+ });
+ });
+
+ describe('when editing a variable', () => {
+ beforeEach(() => {
+ createComponent({
+ provide: { isProtectedByDefault: false },
+ props: {
+ selectedVariable: {},
+ mode: EDIT_VARIABLE_ACTION,
+ },
+ });
+ findModal().vm.$emit('shown');
+ });
+
+ it('keeps the value as false', () => {
+ expect(
+ findProtectedVariableCheckbox().attributes('data-is-protected-checked'),
+ ).toBeUndefined();
+ });
+ });
+ });
+
+ describe('Adding a new non-AWS variable', () => {
+ beforeEach(() => {
+ const [variable] = mockVariables;
+ createComponent({ mountFn: mountExtended, props: { selectedVariable: variable } });
+ });
+
+ it('does not show AWS guidance tip', () => {
+ const tip = findAWSTip();
+ expect(tip.exists()).toBe(true);
+ expect(tip.isVisible()).toBe(false);
+ });
+ });
+
+ describe('Adding a new AWS variable', () => {
+ beforeEach(() => {
+ const [variable] = mockVariables;
+ const AWSKeyVariable = {
+ ...variable,
+ key: AWS_ACCESS_KEY_ID,
+ value: 'AKIAIOSFODNN7EXAMPLEjdhy',
+ };
+ createComponent({ mountFn: mountExtended, props: { selectedVariable: AWSKeyVariable } });
+ });
+
+ it('shows AWS guidance tip', () => {
+ const tip = findAWSTip();
+ expect(tip.exists()).toBe(true);
+ expect(tip.isVisible()).toBe(true);
+ });
+ });
+
+ describe('Reference warning when adding a variable', () => {
+ describe('with a $ character', () => {
+ beforeEach(() => {
+ const [variable] = mockVariables;
+ const variableWithDollarSign = {
+ ...variable,
+ value: 'valueWith$',
+ };
+ createComponent({
+ mountFn: mountExtended,
+ props: { selectedVariable: variableWithDollarSign },
+ });
+ });
+
+ it(`renders the variable reference warning`, () => {
+ expect(findReferenceWarning().exists()).toBe(true);
+ });
+ });
+
+ describe('without a $ character', () => {
+ beforeEach(() => {
+ const [variable] = mockVariables;
+ createComponent({
+ mountFn: mountExtended,
+ props: { selectedVariable: variable },
+ });
+ });
+
+ it(`does not render the variable reference warning`, () => {
+ expect(findReferenceWarning().exists()).toBe(false);
+ });
+ });
+ });
+
+ describe('Editing a variable', () => {
+ const [variable] = mockVariables;
+
+ beforeEach(() => {
+ createComponent({ props: { selectedVariable: variable, mode: EDIT_VARIABLE_ACTION } });
+ jest.spyOn(wrapper.vm, '$emit');
+ });
+
+ it('button text is Update variable when updating', () => {
+ expect(findAddorUpdateButton().text()).toBe('Update variable');
+ });
+
+ it('Update variable button dispatches update-variable with the correct variable', () => {
+ findAddorUpdateButton().vm.$emit('click');
+ expect(wrapper.emitted('update-variable')).toEqual([[variable]]);
+ });
+
+ it('Propagates the `hideModal` event', () => {
+ findModal().vm.$emit('hidden');
+ expect(wrapper.emitted('hideModal')).toEqual([[]]);
+ });
+
+ it('dispatches `delete-variable` with correct variable to delete', () => {
+ deleteVariableButton().vm.$emit('click');
+ expect(wrapper.emitted('delete-variable')).toEqual([[variable]]);
+ });
+ });
+
+ describe('Environment scope', () => {
+ describe('when feature is available', () => {
+ it('renders the environment dropdown', () => {
+ createComponent({
+ mountFn: mountExtended,
+ props: {
+ areScopedVariablesAvailable: true,
+ },
+ });
+
+ expect(findCiEnvironmentsDropdown().exists()).toBe(true);
+ expect(findCiEnvironmentsDropdown().isVisible()).toBe(true);
+ });
+
+ it('renders a link to documentation on scopes', () => {
+ createComponent({ mountFn: mountExtended });
+
+ const link = findEnvScopeLink();
+
+ expect(link.attributes('title')).toBe(ENVIRONMENT_SCOPE_LINK_TITLE);
+ expect(link.attributes('href')).toBe(defaultProvide.environmentScopeLink);
+ });
+ });
+
+ describe('when feature is not available', () => {
+ it('disables the dropdown', () => {
+ createComponent({
+ mountFn: mountExtended,
+ props: {
+ areScopedVariablesAvailable: false,
+ },
+ });
+
+ expect(findCiEnvironmentsDropdown().exists()).toBe(false);
+ expect(findEnvScopeInput().attributes('readonly')).toBe('readonly');
+ });
+ });
+ });
+
+ describe('Validations', () => {
+ const maskError = 'This variable can not be masked.';
+
+ describe('when the mask state is invalid', () => {
+ beforeEach(async () => {
+ const [variable] = mockVariables;
+ const invalidMaskVariable = {
+ ...variable,
+ value: 'd:;',
+ masked: false,
+ };
+ createComponent({
+ mountFn: mountExtended,
+ props: { selectedVariable: invalidMaskVariable },
+ });
+ trackingSpy = mockTracking(undefined, wrapper.element, jest.spyOn);
+ await findMaskedVariableCheckbox().trigger('click');
+ });
+
+ it('disables the submit button', () => {
+ expect(findAddorUpdateButton().attributes('disabled')).toBe('disabled');
+ });
+
+ it('shows the correct error text', () => {
+ expect(findModal().text()).toContain(maskError);
+ });
+
+ it('sends the correct tracking event', () => {
+ expect(trackingSpy).toHaveBeenCalledWith(undefined, EVENT_ACTION, {
+ label: EVENT_LABEL,
+ property: ';',
+ });
+ });
+ });
+
+ describe.each`
+ value | masked | eventSent | trackingErrorProperty
+ ${'secretValue'} | ${false} | ${0} | ${null}
+ ${'short'} | ${true} | ${0} | ${null}
+ ${'dollar$ign'} | ${false} | ${1} | ${'$'}
+ ${'dollar$ign'} | ${true} | ${1} | ${'$'}
+ ${'unsupported|char'} | ${true} | ${1} | ${'|'}
+ ${'unsupported|char'} | ${false} | ${0} | ${null}
+ `('Adding a new variable', ({ value, masked, eventSent, trackingErrorProperty }) => {
+ beforeEach(async () => {
+ const [variable] = mockVariables;
+ const invalidKeyVariable = {
+ ...variable,
+ value: '',
+ masked: false,
+ };
+ createComponent({
+ mountFn: mountExtended,
+ props: { selectedVariable: invalidKeyVariable },
+ });
+ trackingSpy = mockTracking(undefined, wrapper.element, jest.spyOn);
+ await findValueField().vm.$emit('input', value);
+ if (masked) {
+ await findMaskedVariableCheckbox().trigger('click');
+ }
+ });
+
+ it(`${
+ eventSent > 0 ? 'sends the correct' : 'does not send the'
+ } variable validation tracking event with ${value}`, () => {
+ expect(trackingSpy).toHaveBeenCalledTimes(eventSent);
+
+ if (eventSent > 0) {
+ expect(trackingSpy).toHaveBeenCalledWith(undefined, EVENT_ACTION, {
+ label: EVENT_LABEL,
+ property: trackingErrorProperty,
+ });
+ }
+ });
+ });
+
+ describe('when masked variable has acceptable value', () => {
+ beforeEach(() => {
+ const [variable] = mockVariables;
+ const validMaskandKeyVariable = {
+ ...variable,
+ key: AWS_ACCESS_KEY_ID,
+ value: '12345678',
+ masked: true,
+ };
+ createComponent({
+ mountFn: mountExtended,
+ props: { selectedVariable: validMaskandKeyVariable },
+ });
+ });
+
+ it('does not disable the submit button', () => {
+ expect(findAddorUpdateButton().attributes('disabled')).toBeUndefined();
+ });
+ });
+ });
+});
diff --git a/spec/frontend/ci_variable_list/components/ci_variable_settings_spec.js b/spec/frontend/ci_variable_list/components/ci_variable_settings_spec.js
new file mode 100644
index 00000000000..5c77ce71b41
--- /dev/null
+++ b/spec/frontend/ci_variable_list/components/ci_variable_settings_spec.js
@@ -0,0 +1,128 @@
+import { nextTick } from 'vue';
+import { shallowMount } from '@vue/test-utils';
+import CiVariableSettings from '~/ci_variable_list/components/ci_variable_settings.vue';
+import ciVariableModal from '~/ci_variable_list/components/ci_variable_modal.vue';
+import ciVariableTable from '~/ci_variable_list/components/ci_variable_table.vue';
+import {
+ ADD_VARIABLE_ACTION,
+ EDIT_VARIABLE_ACTION,
+ projectString,
+} from '~/ci_variable_list/constants';
+import { mapEnvironmentNames } from '~/ci_variable_list/utils';
+
+import { mockEnvs, mockVariablesWithScopes, newVariable } from '../mocks';
+
+ describe('Ci variable settings', () => {
+ let wrapper;
+
+ const defaultProps = {
+ areScopedVariablesAvailable: true,
+ environments: mapEnvironmentNames(mockEnvs),
+ isLoading: false,
+ variables: mockVariablesWithScopes(projectString),
+ };
+
+ const findCiVariableTable = () => wrapper.findComponent(ciVariableTable);
+ const findCiVariableModal = () => wrapper.findComponent(ciVariableModal);
+
+ const createComponent = () => {
+ wrapper = shallowMount(CiVariableSettings, {
+ propsData: {
+ ...defaultProps,
+ },
+ });
+ };
+
+ beforeEach(() => {
+ createComponent();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('props passing', () => {
+ it('passes props down correctly to the ci table', () => {
+ expect(findCiVariableTable().props()).toEqual({
+ isLoading: defaultProps.isLoading,
+ variables: defaultProps.variables,
+ });
+ });
+
+ it('passes props down correctly to the ci modal', async () => {
+ findCiVariableTable().vm.$emit('set-selected-variable');
+ await nextTick();
+
+ expect(findCiVariableModal().props()).toEqual({
+ areScopedVariablesAvailable: defaultProps.areScopedVariablesAvailable,
+ environments: defaultProps.environments,
+ variables: defaultProps.variables,
+ mode: ADD_VARIABLE_ACTION,
+ selectedVariable: {},
+ });
+ });
+ });
+
+ describe('modal mode', () => {
+ it('passes down ADD mode when receiving an empty variable', async () => {
+ findCiVariableTable().vm.$emit('set-selected-variable');
+ await nextTick();
+
+ expect(findCiVariableModal().props('mode')).toBe(ADD_VARIABLE_ACTION);
+ });
+
+ it('passes down EDIT mode when receiving a variable', async () => {
+ findCiVariableTable().vm.$emit('set-selected-variable', newVariable);
+ await nextTick();
+
+ expect(findCiVariableModal().props('mode')).toBe(EDIT_VARIABLE_ACTION);
+ });
+ });
+
+ describe('variable modal', () => {
+ it('is hidden by default', () => {
+ expect(findCiVariableModal().exists()).toBe(false);
+ });
+
+ it('shows modal when adding a new variable', async () => {
+ findCiVariableTable().vm.$emit('set-selected-variable');
+ await nextTick();
+
+ expect(findCiVariableModal().exists()).toBe(true);
+ });
+
+ it('shows modal when updating a variable', async () => {
+ findCiVariableTable().vm.$emit('set-selected-variable', newVariable);
+ await nextTick();
+
+ expect(findCiVariableModal().exists()).toBe(true);
+ });
+
+ it('hides modal when receiving the event from the modal', async () => {
+ findCiVariableTable().vm.$emit('set-selected-variable');
+ await nextTick();
+
+ findCiVariableModal().vm.$emit('hideModal');
+ await nextTick();
+
+ expect(findCiVariableModal().exists()).toBe(false);
+ });
+ });
+
+ describe('variable events', () => {
+ it.each`
+ eventName
+ ${'add-variable'}
+ ${'update-variable'}
+ ${'delete-variable'}
+ `('bubbles up the $eventName event', async ({ eventName }) => {
+ findCiVariableTable().vm.$emit('set-selected-variable');
+ await nextTick();
+
+ findCiVariableModal().vm.$emit(eventName, newVariable);
+ await nextTick();
+
+ expect(wrapper.emitted(eventName)).toEqual([[newVariable]]);
+ });
+ });
+});
diff --git a/spec/frontend/ci_variable_list/components/ci_variable_table_spec.js b/spec/frontend/ci_variable_list/components/ci_variable_table_spec.js
new file mode 100644
index 00000000000..8a4c35173ec
--- /dev/null
+++ b/spec/frontend/ci_variable_list/components/ci_variable_table_spec.js
@@ -0,0 +1,98 @@
+import { mountExtended } from 'helpers/vue_test_utils_helper';
+import CiVariableTable from '~/ci_variable_list/components/ci_variable_table.vue';
+import { projectString } from '~/ci_variable_list/constants';
+import { mockVariables } from '../mocks';
+
+describe('Ci variable table', () => {
+ let wrapper;
+
+ const defaultProps = {
+ isLoading: false,
+ variables: mockVariables(projectString),
+ };
+
+ const createComponent = ({ props = {} } = {}) => {
+ wrapper = mountExtended(CiVariableTable, {
+ attachTo: document.body,
+ propsData: {
+ ...defaultProps,
+ ...props,
+ },
+ });
+ };
+
+ const findRevealButton = () => wrapper.findByText('Reveal values');
+ const findAddButton = () => wrapper.findByLabelText('Add');
+ const findEditButton = () => wrapper.findByLabelText('Edit');
+ const findEmptyVariablesPlaceholder = () => wrapper.findByText('There are no variables yet.');
+ const findHiddenValues = () => wrapper.findAll('[data-testid="hiddenValue"]');
+ const findRevealedValues = () => wrapper.findAll('[data-testid="revealedValue"]');
+
+ beforeEach(() => {
+ createComponent();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('When table is empty', () => {
+ beforeEach(() => {
+ createComponent({ props: { variables: [] } });
+ });
+
+ it('displays empty message', () => {
+ expect(findEmptyVariablesPlaceholder().exists()).toBe(true);
+ });
+
+ it('hides the reveal button', () => {
+ expect(findRevealButton().exists()).toBe(false);
+ });
+ });
+
+ describe('When table has variables', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('does not display the empty message', () => {
+ expect(findEmptyVariablesPlaceholder().exists()).toBe(false);
+ });
+
+ it('displays the reveal button', () => {
+ expect(findRevealButton().exists()).toBe(true);
+ });
+
+ it('displays the correct number of variables', async () => {
+ expect(wrapper.findAll('.js-ci-variable-row')).toHaveLength(defaultProps.variables.length);
+ });
+ });
+
+ describe('Table click actions', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('reveals secret values when button is clicked', async () => {
+ expect(findHiddenValues()).toHaveLength(defaultProps.variables.length);
+ expect(findRevealedValues()).toHaveLength(0);
+
+ await findRevealButton().trigger('click');
+
+ expect(findHiddenValues()).toHaveLength(0);
+ expect(findRevealedValues()).toHaveLength(defaultProps.variables.length);
+ });
+
+ it('dispatches `set-selected-variable` with the correct variable to edit', async () => {
+ await findEditButton().trigger('click');
+
+ expect(wrapper.emitted('set-selected-variable')).toEqual([[defaultProps.variables[0]]]);
+ });
+
+ it('dispatches `set-selected-variable` with no variable when adding a new one', async () => {
+ await findAddButton().trigger('click');
+
+ expect(wrapper.emitted('set-selected-variable')).toEqual([[null]]);
+ });
+ });
+});
diff --git a/spec/frontend/ci_variable_list/components/legacy_ci_variable_modal_spec.js b/spec/frontend/ci_variable_list/components/legacy_ci_variable_modal_spec.js
index 42c6501dcce..6681ab91a4a 100644
--- a/spec/frontend/ci_variable_list/components/legacy_ci_variable_modal_spec.js
+++ b/spec/frontend/ci_variable_list/components/legacy_ci_variable_modal_spec.js
@@ -58,7 +58,7 @@ describe('Ci variable modal', () => {
});
it('button is disabled when no key/value pair are present', () => {
- expect(findAddorUpdateButton().attributes('disabled')).toBeTruthy();
+ expect(findAddorUpdateButton().attributes('disabled')).toBe('true');
});
});
@@ -71,7 +71,7 @@ describe('Ci variable modal', () => {
});
it('button is enabled when key/value pair are present', () => {
- expect(findAddorUpdateButton().attributes('disabled')).toBeFalsy();
+ expect(findAddorUpdateButton().attributes('disabled')).toBeUndefined();
});
it('Add variable button dispatches addVariable action', () => {
@@ -249,7 +249,7 @@ describe('Ci variable modal', () => {
});
it('disables the submit button', () => {
- expect(findAddorUpdateButton().attributes('disabled')).toBeTruthy();
+ expect(findAddorUpdateButton().attributes('disabled')).toBe('disabled');
});
it('shows the correct error text', () => {
@@ -316,7 +316,7 @@ describe('Ci variable modal', () => {
});
it('does not disable the submit button', () => {
- expect(findAddorUpdateButton().attributes('disabled')).toBeFalsy();
+ expect(findAddorUpdateButton().attributes('disabled')).toBeUndefined();
});
});
});
diff --git a/spec/frontend/ci_variable_list/mocks.js b/spec/frontend/ci_variable_list/mocks.js
new file mode 100644
index 00000000000..89ba77858dc
--- /dev/null
+++ b/spec/frontend/ci_variable_list/mocks.js
@@ -0,0 +1,109 @@
+import { variableTypes, groupString, instanceString } from '~/ci_variable_list/constants';
+
+export const devName = 'dev';
+export const prodName = 'prod';
+
+export const mockVariables = (kind) => {
+ return [
+ {
+ __typename: `Ci${kind}Variable`,
+ id: 1,
+ key: 'my-var',
+ masked: false,
+ protected: true,
+ value: 'env_val',
+ variableType: variableTypes.variableType,
+ },
+ {
+ __typename: `Ci${kind}Variable`,
+ id: 2,
+ key: 'secret',
+ masked: true,
+ protected: false,
+ value: 'the_secret_value',
+ variableType: variableTypes.fileType,
+ },
+ ];
+};
+
+export const mockVariablesWithScopes = (kind) =>
+ mockVariables(kind).map((variable) => {
+ return { ...variable, environmentScope: '*' };
+ });
+
+const createDefaultVars = ({ withScope = true, kind } = {}) => {
+ let base = mockVariables(kind);
+
+ if (withScope) {
+ base = mockVariablesWithScopes(kind);
+ }
+
+ return {
+ __typename: `Ci${kind}VariableConnection`,
+ nodes: base,
+ };
+};
+
+const defaultEnvs = {
+ __typename: 'EnvironmentConnection',
+ nodes: [
+ {
+ __typename: 'Environment',
+ id: 1,
+ name: prodName,
+ },
+ {
+ __typename: 'Environment',
+ id: 2,
+ name: devName,
+ },
+ ],
+};
+
+export const mockEnvs = defaultEnvs.nodes;
+
+export const mockProjectEnvironments = {
+ data: {
+ project: {
+ __typename: 'Project',
+ id: 1,
+ environments: defaultEnvs,
+ },
+ },
+};
+
+export const mockProjectVariables = {
+ data: {
+ project: {
+ __typename: 'Project',
+ id: 1,
+ ciVariables: createDefaultVars(),
+ },
+ },
+};
+
+export const mockGroupVariables = {
+ data: {
+ group: {
+ __typename: 'Group',
+ id: 1,
+ ciVariables: createDefaultVars({ kind: groupString }),
+ },
+ },
+};
+
+export const mockAdminVariables = {
+ data: {
+ ciVariables: createDefaultVars({ withScope: false, kind: instanceString }),
+ },
+};
+
+export const newVariable = {
+ id: 3,
+ environmentScope: 'new',
+ key: 'AWS_RANDOM_THING',
+ masked: true,
+ protected: false,
+ value: 'devops',
+ variableType: variableTypes.variableType,
+};
diff --git a/spec/frontend/ci_variable_list/utils_spec.js b/spec/frontend/ci_variable_list/utils_spec.js
new file mode 100644
index 00000000000..081c399792f
--- /dev/null
+++ b/spec/frontend/ci_variable_list/utils_spec.js
@@ -0,0 +1,78 @@
+import {
+ createJoinedEnvironments,
+ convertEnvironmentScope,
+ mapEnvironmentNames,
+} from '~/ci_variable_list/utils';
+import { allEnvironments } from '~/ci_variable_list/constants';
+
+describe('utils', () => {
+ const environments = ['dev', 'prod'];
+ const newEnvironments = ['staging'];
+
+ describe('createJoinedEnvironments', () => {
+ it('returns only `environments` if `variables` argument is undefined', () => {
+ const variables = undefined;
+
+ expect(createJoinedEnvironments(variables, environments, [])).toEqual(environments);
+ });
+
+ it('returns a list of environments and environment scopes taken from variables in alphabetical order', () => {
+ const envScope1 = 'new1';
+ const envScope2 = 'new2';
+
+ const variables = [{ environmentScope: envScope1 }, { environmentScope: envScope2 }];
+
+ expect(createJoinedEnvironments(variables, environments, [])).toEqual([
+ environments[0],
+ envScope1,
+ envScope2,
+ environments[1],
+ ]);
+ });
+
+ it('returns combined list with new environments included', () => {
+ const variables = undefined;
+
+ expect(createJoinedEnvironments(variables, environments, newEnvironments)).toEqual([
+ ...environments,
+ ...newEnvironments,
+ ]);
+ });
+
+ it('removes duplicate environments', () => {
+ const envScope1 = environments[0];
+ const envScope2 = 'new2';
+
+ const variables = [{ environmentScope: envScope1 }, { environmentScope: envScope2 }];
+
+ expect(createJoinedEnvironments(variables, environments, [])).toEqual([
+ environments[0],
+ envScope2,
+ environments[1],
+ ]);
+ });
+ });
+
+ describe('convertEnvironmentScope', () => {
+ it('converts the * to the `All environments` text', () => {
+ expect(convertEnvironmentScope('*')).toBe(allEnvironments.text);
+ });
+
+ it('returns the environment as is if not the *', () => {
+ expect(convertEnvironmentScope('prod')).toBe('prod');
+ });
+ });
+
+ describe('mapEnvironmentNames', () => {
+ const envName = 'dev';
+ const envName2 = 'prod';
+
+ const nodes = [
+ { name: envName, otherProp: {} },
+ { name: envName2, otherProp: {} },
+ ];
+ it('flattens an array of nodes into a list of names', () => {
+ expect(mapEnvironmentNames(nodes)).toEqual([envName, envName2]);
+ });
+ });
+});
diff --git a/spec/frontend/clusters/agents/components/activity_history_item_spec.js b/spec/frontend/clusters/agents/components/activity_history_item_spec.js
index 100a280d0cc..68f6f11aa8f 100644
--- a/spec/frontend/clusters/agents/components/activity_history_item_spec.js
+++ b/spec/frontend/clusters/agents/components/activity_history_item_spec.js
@@ -23,7 +23,7 @@ describe('ActivityHistoryItem', () => {
};
const findHistoryItem = () => wrapper.findComponent(HistoryItem);
- const findTimeAgo = () => wrapper.find(TimeAgoTooltip);
+ const findTimeAgo = () => wrapper.findComponent(TimeAgoTooltip);
afterEach(() => {
wrapper.destroy();
diff --git a/spec/frontend/clusters/agents/components/create_token_modal_spec.js b/spec/frontend/clusters/agents/components/create_token_modal_spec.js
index ad48afe10b6..0d10801e80e 100644
--- a/spec/frontend/clusters/agents/components/create_token_modal_spec.js
+++ b/spec/frontend/clusters/agents/components/create_token_modal_spec.js
@@ -55,7 +55,7 @@ describe('CreateTokenModal', () => {
const findAgentInstructions = () => findModal().findComponent(AgentToken);
const findButtonByVariant = (variant) =>
findModal()
- .findAll(GlButton)
+ .findAllComponents(GlButton)
.wrappers.find((button) => button.props('variant') === variant);
const findActionButton = () => findButtonByVariant('confirm');
const findCancelButton = () => wrapper.findByTestId('agent-token-close-button');
diff --git a/spec/frontend/clusters/agents/components/token_table_spec.js b/spec/frontend/clusters/agents/components/token_table_spec.js
index 6caeaf5c192..334615f1818 100644
--- a/spec/frontend/clusters/agents/components/token_table_spec.js
+++ b/spec/frontend/clusters/agents/components/token_table_spec.js
@@ -136,8 +136,8 @@ describe('ClusterAgentTokenTable', () => {
const token = tokens.at(lineNumber);
expect(token.text()).toContain(description);
- expect(token.find(GlTruncate).exists()).toBe(truncatesText);
- expect(token.find(GlTooltip).exists()).toBe(hasTooltip);
+ expect(token.findComponent(GlTruncate).exists()).toBe(truncatesText);
+ expect(token.findComponent(GlTooltip).exists()).toBe(hasTooltip);
},
);
diff --git a/spec/frontend/clusters/clusters_bundle_spec.js b/spec/frontend/clusters/clusters_bundle_spec.js
index b5345ea8915..ad2aa4acbaf 100644
--- a/spec/frontend/clusters/clusters_bundle_spec.js
+++ b/spec/frontend/clusters/clusters_bundle_spec.js
@@ -1,7 +1,6 @@
import MockAdapter from 'axios-mock-adapter';
import { loadHTMLFixture, resetHTMLFixture } from 'helpers/fixtures';
import { useMockLocationHelper } from 'helpers/mock_window_location_helper';
-import { setTestTimeout } from 'helpers/timeout';
import Clusters from '~/clusters/clusters_bundle';
import axios from '~/lib/utils/axios_utils';
import initProjectSelectDropdown from '~/project_select';
@@ -12,8 +11,6 @@ jest.mock('~/project_select');
useMockLocationHelper();
describe('Clusters', () => {
- setTestTimeout(1000);
-
let cluster;
let mock;
@@ -60,9 +57,9 @@ describe('Clusters', () => {
it('should show the creating container', () => {
cluster.updateContainer(null, 'creating');
- expect(cluster.creatingContainer.classList.contains('hidden')).toBeFalsy();
- expect(cluster.successContainer.classList.contains('hidden')).toBeTruthy();
- expect(cluster.errorContainer.classList.contains('hidden')).toBeTruthy();
+ expect(cluster.creatingContainer.classList.contains('hidden')).toBe(false);
+ expect(cluster.successContainer.classList.contains('hidden')).toBe(true);
+ expect(cluster.errorContainer.classList.contains('hidden')).toBe(true);
expect(window.location.reload).not.toHaveBeenCalled();
});
@@ -70,9 +67,9 @@ describe('Clusters', () => {
cluster.updateContainer(null, 'creating');
cluster.updateContainer('creating', 'creating');
- expect(cluster.creatingContainer.classList.contains('hidden')).toBeFalsy();
- expect(cluster.successContainer.classList.contains('hidden')).toBeTruthy();
- expect(cluster.errorContainer.classList.contains('hidden')).toBeTruthy();
+ expect(cluster.creatingContainer.classList.contains('hidden')).toBe(false);
+ expect(cluster.successContainer.classList.contains('hidden')).toBe(true);
+ expect(cluster.errorContainer.classList.contains('hidden')).toBe(true);
expect(window.location.reload).not.toHaveBeenCalled();
});
});
@@ -83,9 +80,9 @@ describe('Clusters', () => {
cluster.updateContainer(null, 'creating');
cluster.updateContainer('creating', 'created');
- expect(cluster.creatingContainer.classList.contains('hidden')).toBeTruthy();
- expect(cluster.successContainer.classList.contains('hidden')).toBeTruthy();
- expect(cluster.errorContainer.classList.contains('hidden')).toBeTruthy();
+ expect(cluster.creatingContainer.classList.contains('hidden')).toBe(true);
+ expect(cluster.successContainer.classList.contains('hidden')).toBe(true);
+ expect(cluster.errorContainer.classList.contains('hidden')).toBe(true);
expect(window.location.reload).toHaveBeenCalled();
expect(cluster.setClusterNewlyCreated).toHaveBeenCalledWith(true);
});
@@ -97,9 +94,9 @@ describe('Clusters', () => {
cluster.updateContainer(null, 'created');
cluster.updateContainer('created', 'created');
- expect(cluster.creatingContainer.classList.contains('hidden')).toBeTruthy();
- expect(cluster.successContainer.classList.contains('hidden')).toBeFalsy();
- expect(cluster.errorContainer.classList.contains('hidden')).toBeTruthy();
+ expect(cluster.creatingContainer.classList.contains('hidden')).toBe(true);
+ expect(cluster.successContainer.classList.contains('hidden')).toBe(false);
+ expect(cluster.errorContainer.classList.contains('hidden')).toBe(true);
expect(window.location.reload).not.toHaveBeenCalled();
expect(cluster.setClusterNewlyCreated).toHaveBeenCalledWith(false);
});
@@ -111,9 +108,9 @@ describe('Clusters', () => {
cluster.updateContainer(null, 'created');
cluster.updateContainer('created', 'created');
- expect(cluster.creatingContainer.classList.contains('hidden')).toBeTruthy();
- expect(cluster.successContainer.classList.contains('hidden')).toBeTruthy();
- expect(cluster.errorContainer.classList.contains('hidden')).toBeTruthy();
+ expect(cluster.creatingContainer.classList.contains('hidden')).toBe(true);
+ expect(cluster.successContainer.classList.contains('hidden')).toBe(true);
+ expect(cluster.errorContainer.classList.contains('hidden')).toBe(true);
expect(window.location.reload).not.toHaveBeenCalled();
expect(cluster.setClusterNewlyCreated).not.toHaveBeenCalled();
});
@@ -123,11 +120,11 @@ describe('Clusters', () => {
it('should show the error container', () => {
cluster.updateContainer(null, 'errored', 'this is an error');
- expect(cluster.creatingContainer.classList.contains('hidden')).toBeTruthy();
+ expect(cluster.creatingContainer.classList.contains('hidden')).toBe(true);
- expect(cluster.successContainer.classList.contains('hidden')).toBeTruthy();
+ expect(cluster.successContainer.classList.contains('hidden')).toBe(true);
- expect(cluster.errorContainer.classList.contains('hidden')).toBeFalsy();
+ expect(cluster.errorContainer.classList.contains('hidden')).toBe(false);
expect(cluster.errorReasonContainer.textContent).toContain('this is an error');
});
@@ -135,11 +132,11 @@ describe('Clusters', () => {
it('should show `error` banner when previously `creating`', () => {
cluster.updateContainer('creating', 'errored');
- expect(cluster.creatingContainer.classList.contains('hidden')).toBeTruthy();
+ expect(cluster.creatingContainer.classList.contains('hidden')).toBe(true);
- expect(cluster.successContainer.classList.contains('hidden')).toBeTruthy();
+ expect(cluster.successContainer.classList.contains('hidden')).toBe(true);
- expect(cluster.errorContainer.classList.contains('hidden')).toBeFalsy();
+ expect(cluster.errorContainer.classList.contains('hidden')).toBe(false);
});
});
diff --git a/spec/frontend/clusters/components/new_cluster_spec.js b/spec/frontend/clusters/components/new_cluster_spec.js
index f9df70b9f87..ef39c90aaef 100644
--- a/spec/frontend/clusters/components/new_cluster_spec.js
+++ b/spec/frontend/clusters/components/new_cluster_spec.js
@@ -12,9 +12,9 @@ describe('NewCluster', () => {
await nextTick();
};
- const findDescription = () => wrapper.find(GlSprintf);
+ const findDescription = () => wrapper.findComponent(GlSprintf);
- const findLink = () => wrapper.find(GlLink);
+ const findLink = () => wrapper.findComponent(GlLink);
beforeEach(() => {
return createWrapper();
diff --git a/spec/frontend/clusters/forms/components/integration_form_spec.js b/spec/frontend/clusters/forms/components/integration_form_spec.js
index 67d442bfdc5..b17886a5826 100644
--- a/spec/frontend/clusters/forms/components/integration_form_spec.js
+++ b/spec/frontend/clusters/forms/components/integration_form_spec.js
@@ -32,8 +32,8 @@ describe('ClusterIntegrationForm', () => {
wrapper = null;
};
- const findSubmitButton = () => wrapper.find(GlButton);
- const findGlToggle = () => wrapper.find(GlToggle);
+ const findSubmitButton = () => wrapper.findComponent(GlButton);
+ const findGlToggle = () => wrapper.findComponent(GlToggle);
afterEach(() => {
destroyWrapper();
diff --git a/spec/frontend/clusters_list/components/install_agent_modal_spec.js b/spec/frontend/clusters_list/components/install_agent_modal_spec.js
index 29884675b24..964dd005a27 100644
--- a/spec/frontend/clusters_list/components/install_agent_modal_spec.js
+++ b/spec/frontend/clusters_list/components/install_agent_modal_spec.js
@@ -150,7 +150,6 @@ describe('InstallAgentModal', () => {
});
it("doesn't render agent installation instructions", () => {
- expect(findModal().text()).not.toContain(i18n.basicInstallTitle);
expect(findModal().findComponent(GlFormInputGroup).exists()).toBe(false);
expect(findModal().findComponent(GlAlert).exists()).toBe(false);
});
diff --git a/spec/frontend/commit/pipelines/pipelines_table_spec.js b/spec/frontend/commit/pipelines/pipelines_table_spec.js
index 9b01af1e585..71ee12cf02d 100644
--- a/spec/frontend/commit/pipelines/pipelines_table_spec.js
+++ b/spec/frontend/commit/pipelines/pipelines_table_spec.js
@@ -1,4 +1,4 @@
-import { GlEmptyState, GlLoadingIcon, GlModal, GlTableLite } from '@gitlab/ui';
+import { GlLoadingIcon, GlModal, GlTableLite } from '@gitlab/ui';
import { mount } from '@vue/test-utils';
import MockAdapter from 'axios-mock-adapter';
import { nextTick } from 'vue';
@@ -8,7 +8,7 @@ import waitForPromises from 'helpers/wait_for_promises';
import Api from '~/api';
import PipelinesTable from '~/commit/pipelines/pipelines_table.vue';
import httpStatusCodes from '~/lib/utils/http_status';
-import createFlash from '~/flash';
+import { createAlert } from '~/flash';
import { TOAST_MESSAGE } from '~/pipelines/constants';
import axios from '~/lib/utils/axios_utils';
@@ -26,10 +26,12 @@ describe('Pipelines table in Commits and Merge requests', () => {
const findRunPipelineBtn = () => wrapper.findByTestId('run_pipeline_button');
const findRunPipelineBtnMobile = () => wrapper.findByTestId('run_pipeline_button_mobile');
const findLoadingState = () => wrapper.findComponent(GlLoadingIcon);
- const findEmptyState = () => wrapper.findComponent(GlEmptyState);
+ const findErrorEmptyState = () => wrapper.findByTestId('pipeline-error-empty-state');
+ const findEmptyState = () => wrapper.findByTestId('pipeline-empty-state');
const findTable = () => wrapper.findComponent(GlTableLite);
const findTableRows = () => wrapper.findAllByTestId('pipeline-table-row');
const findModal = () => wrapper.findComponent(GlModal);
+ const findMrPipelinesDocsLink = () => wrapper.findByTestId('mr-pipelines-docs-link');
const createComponent = (props = {}) => {
wrapper = extendedWrapper(
@@ -73,7 +75,18 @@ describe('Pipelines table in Commits and Merge requests', () => {
it('should render the empty state', () => {
expect(findTableRows()).toHaveLength(0);
expect(findLoadingState().exists()).toBe(false);
- expect(findEmptyState().exists()).toBe(false);
+ expect(findErrorEmptyState().exists()).toBe(false);
+ expect(findEmptyState().exists()).toBe(true);
+ });
+
+ it('should render correct empty state content', () => {
+ expect(findRunPipelineBtn().exists()).toBe(true);
+ expect(findMrPipelinesDocsLink().attributes('href')).toBe(
+ '/help/ci/pipelines/merge_request_pipelines.md#prerequisites',
+ );
+ expect(findEmptyState().text()).toContain(
+ 'To run a merge request pipeline, the jobs in the CI/CD configuration file must be configured to run in merge request pipelines.',
+ );
});
});
@@ -90,7 +103,7 @@ describe('Pipelines table in Commits and Merge requests', () => {
expect(findTable().exists()).toBe(true);
expect(findTableRows()).toHaveLength(1);
expect(findLoadingState().exists()).toBe(false);
- expect(findEmptyState().exists()).toBe(false);
+ expect(findErrorEmptyState().exists()).toBe(false);
});
describe('with pagination', () => {
@@ -226,12 +239,14 @@ describe('Pipelines table in Commits and Merge requests', () => {
describe('failure', () => {
const permissionsMsg = 'You do not have permission to run a pipeline on this branch.';
+ const defaultMsg =
+ 'An error occurred while trying to run a new pipeline for this merge request.';
it.each`
status | message
- ${httpStatusCodes.BAD_REQUEST} | ${permissionsMsg}
+ ${httpStatusCodes.BAD_REQUEST} | ${defaultMsg}
${httpStatusCodes.UNAUTHORIZED} | ${permissionsMsg}
- ${httpStatusCodes.INTERNAL_SERVER_ERROR} | ${'An error occurred while trying to run a new pipeline for this merge request.'}
+ ${httpStatusCodes.INTERNAL_SERVER_ERROR} | ${defaultMsg}
`('displays permissions error message', async ({ status, message }) => {
const response = { response: { status } };
@@ -243,7 +258,13 @@ describe('Pipelines table in Commits and Merge requests', () => {
await waitForPromises();
- expect(createFlash).toHaveBeenCalledWith({ message });
+ expect(createAlert).toHaveBeenCalledWith({
+ message,
+ primaryButton: {
+ text: 'Learn more',
+ link: '/help/ci/pipelines/merge_request_pipelines.md',
+ },
+ });
});
});
});
@@ -293,7 +314,7 @@ describe('Pipelines table in Commits and Merge requests', () => {
});
it('should render error state', () => {
- expect(findEmptyState().text()).toBe(
+ expect(findErrorEmptyState().text()).toBe(
'There was an error fetching the pipelines. Try again in a few moments or contact your support team.',
);
});
diff --git a/spec/frontend/content_editor/components/bubble_menus/link_spec.js b/spec/frontend/content_editor/components/bubble_menus/link_spec.js
index ba6d8da9584..93204deb68c 100644
--- a/spec/frontend/content_editor/components/bubble_menus/link_spec.js
+++ b/spec/frontend/content_editor/components/bubble_menus/link_spec.js
@@ -182,7 +182,7 @@ describe('content_editor/components/bubble_menus/link', () => {
it('updates prosemirror doc with new link', async () => {
expect(tiptapEditor.getHTML()).toBe(
- '<p>Download <a target="_blank" rel="noopener noreferrer nofollow" href="https://google.com" title="Search Google" canonicalsrc="https://google.com">PDF File</a></p>',
+ '<p>Download <a target="_blank" rel="noopener noreferrer nofollow" href="https://google.com" title="Search Google">PDF File</a></p>',
);
});
diff --git a/spec/frontend/content_editor/components/bubble_menus/media_spec.js b/spec/frontend/content_editor/components/bubble_menus/media_spec.js
index 8839caea80e..fada4f06743 100644
--- a/spec/frontend/content_editor/components/bubble_menus/media_spec.js
+++ b/spec/frontend/content_editor/components/bubble_menus/media_spec.js
@@ -14,7 +14,7 @@ import {
} from '../../test_constants';
const TIPTAP_IMAGE_HTML = `<p>
- <img src="https://gitlab.com/favicon.png" alt="gitlab favicon" title="gitlab favicon" data-canonical-src="https://gitlab.com/favicon.png">
+ <img src="https://gitlab.com/favicon.png" alt="gitlab favicon" title="gitlab favicon">
</p>`;
const TIPTAP_AUDIO_HTML = `<p>
diff --git a/spec/frontend/content_editor/components/content_editor_spec.js b/spec/frontend/content_editor/components/content_editor_spec.js
index 9ee3b017831..0ba2672100b 100644
--- a/spec/frontend/content_editor/components/content_editor_spec.js
+++ b/spec/frontend/content_editor/components/content_editor_spec.js
@@ -19,6 +19,7 @@ describe('ContentEditor', () => {
const findEditorElement = () => wrapper.findByTestId('content-editor');
const findEditorContent = () => wrapper.findComponent(EditorContent);
+ const findEditorStateObserver = () => wrapper.findComponent(EditorStateObserver);
const createWrapper = (propsData = {}) => {
renderMarkdown = jest.fn();
@@ -119,4 +120,17 @@ describe('ContentEditor', () => {
expect(wrapper.findComponent(FormattingBubbleMenu).exists()).toBe(true);
});
+
+ it.each`
+ event
+ ${'loading'}
+ ${'loadingSuccess'}
+ ${'loadingError'}
+ `('broadcasts $event event triggered by editor-state-observer component', ({ event }) => {
+ createWrapper();
+
+ findEditorStateObserver().vm.$emit(event);
+
+ expect(wrapper.emitted(event)).toHaveLength(1);
+ });
});
diff --git a/spec/frontend/content_editor/components/toolbar_more_dropdown_spec.js b/spec/frontend/content_editor/components/toolbar_more_dropdown_spec.js
index 351fd967719..62fec8d4e72 100644
--- a/spec/frontend/content_editor/components/toolbar_more_dropdown_spec.js
+++ b/spec/frontend/content_editor/components/toolbar_more_dropdown_spec.js
@@ -37,16 +37,17 @@ describe('content_editor/components/toolbar_more_dropdown', () => {
});
describe.each`
- name | contentType | command | params
- ${'Code block'} | ${'codeBlock'} | ${'setNode'} | ${['codeBlock']}
- ${'Details block'} | ${'details'} | ${'toggleList'} | ${['details', 'detailsContent']}
- ${'Bullet list'} | ${'bulletList'} | ${'toggleList'} | ${['bulletList', 'listItem']}
- ${'Ordered list'} | ${'orderedList'} | ${'toggleList'} | ${['orderedList', 'listItem']}
- ${'Task list'} | ${'taskList'} | ${'toggleList'} | ${['taskList', 'taskItem']}
- ${'Mermaid diagram'} | ${'diagram'} | ${'setNode'} | ${['diagram', { language: 'mermaid' }]}
- ${'PlantUML diagram'} | ${'diagram'} | ${'setNode'} | ${['diagram', { language: 'plantuml' }]}
- ${'Horizontal rule'} | ${'horizontalRule'} | ${'setHorizontalRule'} | ${[]}
- `('when option $label is clicked', ({ name, command, contentType, params }) => {
+ name | contentType | command | params
+ ${'Code block'} | ${'codeBlock'} | ${'setNode'} | ${['codeBlock']}
+ ${'Details block'} | ${'details'} | ${'toggleList'} | ${['details', 'detailsContent']}
+ ${'Bullet list'} | ${'bulletList'} | ${'toggleList'} | ${['bulletList', 'listItem']}
+ ${'Ordered list'} | ${'orderedList'} | ${'toggleList'} | ${['orderedList', 'listItem']}
+ ${'Task list'} | ${'taskList'} | ${'toggleList'} | ${['taskList', 'taskItem']}
+ ${'Mermaid diagram'} | ${'diagram'} | ${'setNode'} | ${['diagram', { language: 'mermaid' }]}
+ ${'PlantUML diagram'} | ${'diagram'} | ${'setNode'} | ${['diagram', { language: 'plantuml' }]}
+ ${'Table of contents'} | ${'tableOfContents'} | ${'insertTableOfContents'} | ${[]}
+ ${'Horizontal rule'} | ${'horizontalRule'} | ${'setHorizontalRule'} | ${[]}
+ `('when option $name is clicked', ({ name, command, contentType, params }) => {
let commands;
let btn;
diff --git a/spec/frontend/content_editor/components/top_toolbar_spec.js b/spec/frontend/content_editor/components/top_toolbar_spec.js
index 2acb6e14ce0..8f194ff32e2 100644
--- a/spec/frontend/content_editor/components/top_toolbar_spec.js
+++ b/spec/frontend/content_editor/components/top_toolbar_spec.js
@@ -32,7 +32,7 @@ describe('content_editor/components/top_toolbar', () => {
${'link'} | ${{}}
${'bullet-list'} | ${{ contentType: 'bulletList', iconName: 'list-bulleted', label: 'Add a bullet list', editorCommand: 'toggleBulletList' }}
${'ordered-list'} | ${{ contentType: 'orderedList', iconName: 'list-numbered', label: 'Add a numbered list', editorCommand: 'toggleOrderedList' }}
- ${'task-list'} | ${{ contentType: 'taskList', iconName: 'list-task', label: 'Add a task list', editorCommand: 'toggleTaskList' }}
+ ${'task-list'} | ${{ contentType: 'taskList', iconName: 'list-task', label: 'Add a checklist', editorCommand: 'toggleTaskList' }}
${'image'} | ${{}}
${'table'} | ${{}}
${'more'} | ${{}}
diff --git a/spec/frontend/content_editor/components/wrappers/__snapshots__/table_of_contents_spec.js.snap b/spec/frontend/content_editor/components/wrappers/__snapshots__/table_of_contents_spec.js.snap
new file mode 100644
index 00000000000..fb091419ad9
--- /dev/null
+++ b/spec/frontend/content_editor/components/wrappers/__snapshots__/table_of_contents_spec.js.snap
@@ -0,0 +1,115 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`content/components/wrappers/table_of_contents collects all headings and renders a nested list of headings 1`] = `
+<div
+ class="table-of-contents gl-border-1 gl-border-solid gl-border-gray-100 gl-mb-5 gl-p-4!"
+ data-testid="table-of-contents"
+>
+
+ Table of contents
+
+ <li>
+ <a
+ href="#"
+ >
+
+ Heading 1
+
+ </a>
+
+ <ul>
+ <li>
+ <a
+ href="#"
+ >
+
+ Heading 1.1
+
+ </a>
+
+ <ul>
+ <li>
+ <a
+ href="#"
+ >
+
+ Heading 1.1.1
+
+ </a>
+
+ <!---->
+ </li>
+ </ul>
+ </li>
+ <li>
+ <a
+ href="#"
+ >
+
+ Heading 1.2
+
+ </a>
+
+ <ul>
+ <li>
+ <a
+ href="#"
+ >
+
+ Heading 1.2.1
+
+ </a>
+
+ <!---->
+ </li>
+ </ul>
+ </li>
+ <li>
+ <a
+ href="#"
+ >
+
+ Heading 1.3
+
+ </a>
+
+ <!---->
+ </li>
+ <li>
+ <a
+ href="#"
+ >
+
+ Heading 1.4
+
+ </a>
+
+ <ul>
+ <li>
+ <a
+ href="#"
+ >
+
+ Heading 1.4.1
+
+ </a>
+
+ <!---->
+ </li>
+ </ul>
+ </li>
+ </ul>
+ </li>
+ <li>
+ <a
+ href="#"
+ >
+
+ Heading 2
+
+ </a>
+
+ <!---->
+ </li>
+</div>
+`;
diff --git a/spec/frontend/content_editor/components/wrappers/table_cell_base_spec.js b/spec/frontend/content_editor/components/wrappers/table_cell_base_spec.js
index 6017a145a87..1fdddce3962 100644
--- a/spec/frontend/content_editor/components/wrappers/table_cell_base_spec.js
+++ b/spec/frontend/content_editor/components/wrappers/table_cell_base_spec.js
@@ -1,12 +1,12 @@
import { GlDropdown, GlDropdownItem } from '@gitlab/ui';
import { NodeViewWrapper } from '@tiptap/vue-2';
-import { selectedRect as getSelectedRect } from 'prosemirror-tables';
+import { selectedRect as getSelectedRect } from '@_ueberdosis/prosemirror-tables';
import { nextTick } from 'vue';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import TableCellBaseWrapper from '~/content_editor/components/wrappers/table_cell_base.vue';
import { createTestEditor, mockChainedCommands, emitEditorEvent } from '../../test_utils';
-jest.mock('prosemirror-tables');
+jest.mock('@_ueberdosis/prosemirror-tables');
describe('content/components/wrappers/table_cell_base', () => {
let wrapper;
diff --git a/spec/frontend/content_editor/components/wrappers/table_of_contents_spec.js b/spec/frontend/content_editor/components/wrappers/table_of_contents_spec.js
new file mode 100644
index 00000000000..bfda89a8b09
--- /dev/null
+++ b/spec/frontend/content_editor/components/wrappers/table_of_contents_spec.js
@@ -0,0 +1,84 @@
+import { nextTick } from 'vue';
+import { NodeViewWrapper } from '@tiptap/vue-2';
+import { mountExtended } from 'helpers/vue_test_utils_helper';
+import { stubComponent } from 'helpers/stub_component';
+import eventHubFactory from '~/helpers/event_hub_factory';
+import Heading from '~/content_editor/extensions/heading';
+import Diagram from '~/content_editor/extensions/diagram';
+import TableOfContentsWrapper from '~/content_editor/components/wrappers/table_of_contents.vue';
+import { createTestEditor, createDocBuilder, emitEditorEvent } from '../../test_utils';
+
+describe('content/components/wrappers/table_of_contents', () => {
+ let wrapper;
+ let tiptapEditor;
+ let contentEditor;
+ let eventHub;
+
+ const buildEditor = () => {
+ tiptapEditor = createTestEditor({ extensions: [Heading, Diagram] });
+ contentEditor = { renderDiagram: jest.fn().mockResolvedValue('url/to/some/diagram') };
+ eventHub = eventHubFactory();
+ };
+
+ const createWrapper = async () => {
+ wrapper = mountExtended(TableOfContentsWrapper, {
+ propsData: {
+ editor: tiptapEditor,
+ node: {
+ attrs: {},
+ },
+ },
+ stubs: {
+ NodeViewWrapper: stubComponent(NodeViewWrapper),
+ },
+ provide: {
+ contentEditor,
+ tiptapEditor,
+ eventHub,
+ },
+ });
+ };
+
+ beforeEach(async () => {
+ buildEditor();
+ createWrapper();
+
+ const {
+ builders: { heading, doc },
+ } = createDocBuilder({
+ tiptapEditor,
+ names: {
+ heading: { nodeType: Heading.name },
+ },
+ });
+
+ const initialDoc = doc(
+ heading({ level: 1 }, 'Heading 1'),
+ heading({ level: 2 }, 'Heading 1.1'),
+ heading({ level: 3 }, 'Heading 1.1.1'),
+ heading({ level: 2 }, 'Heading 1.2'),
+ heading({ level: 3 }, 'Heading 1.2.1'),
+ heading({ level: 2 }, 'Heading 1.3'),
+ heading({ level: 2 }, 'Heading 1.4'),
+ heading({ level: 3 }, 'Heading 1.4.1'),
+ heading({ level: 1 }, 'Heading 2'),
+ );
+
+ tiptapEditor.commands.setContent(initialDoc.toJSON());
+
+ await emitEditorEvent({ event: 'update', tiptapEditor });
+ await nextTick();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('renders a node-view-wrapper as a ul element', () => {
+ expect(wrapper.findComponent(NodeViewWrapper).props().as).toBe('ul');
+ });
+
+ it('collects all headings and renders a nested list of headings', () => {
+ expect(wrapper.findComponent(NodeViewWrapper).element).toMatchSnapshot();
+ });
+});
diff --git a/spec/frontend/content_editor/extensions/image_spec.js b/spec/frontend/content_editor/extensions/image_spec.js
index 256f7bad309..f73b0143fd9 100644
--- a/spec/frontend/content_editor/extensions/image_spec.js
+++ b/spec/frontend/content_editor/extensions/image_spec.js
@@ -35,7 +35,7 @@ describe('content_editor/extensions/image', () => {
tiptapEditor.commands.setContent(initialDoc.toJSON());
expect(tiptapEditor.getHTML()).toEqual(
- '<p><img src="/-/wikis/uploads/image.jpg" alt="image" title="this is an image" data-canonical-src="uploads/image.jpg"></p>',
+ '<p><img src="/-/wikis/uploads/image.jpg" alt="image" title="this is an image"></p>',
);
});
});
diff --git a/spec/frontend/content_editor/markdown_processing_spec_helper.js b/spec/frontend/content_editor/markdown_processing_spec_helper.js
index 41442dd8388..228d009e42c 100644
--- a/spec/frontend/content_editor/markdown_processing_spec_helper.js
+++ b/spec/frontend/content_editor/markdown_processing_spec_helper.js
@@ -2,7 +2,6 @@ import fs from 'fs';
import jsYaml from 'js-yaml';
import { memoize } from 'lodash';
import { createContentEditor } from '~/content_editor';
-import { setTestTimeoutOnce } from 'helpers/timeout';
const getFocusedMarkdownExamples = memoize(
() => process.env.FOCUSED_MARKDOWN_EXAMPLES?.split(',') || [],
@@ -76,9 +75,6 @@ export const describeMarkdownProcessing = (description, markdownYamlPath) => {
}
it(exampleName, async () => {
- if (name === 'frontmatter_toml') {
- setTestTimeoutOnce(2000);
- }
await testSerializesHtmlToMarkdownForElement(example);
});
});
diff --git a/spec/frontend/content_editor/remark_markdown_processing_spec.js b/spec/frontend/content_editor/remark_markdown_processing_spec.js
index 48adceaab58..7ae0a7c13c1 100644
--- a/spec/frontend/content_editor/remark_markdown_processing_spec.js
+++ b/spec/frontend/content_editor/remark_markdown_processing_spec.js
@@ -5,6 +5,7 @@ import Code from '~/content_editor/extensions/code';
import CodeBlockHighlight from '~/content_editor/extensions/code_block_highlight';
import FootnoteDefinition from '~/content_editor/extensions/footnote_definition';
import FootnoteReference from '~/content_editor/extensions/footnote_reference';
+import Frontmatter from '~/content_editor/extensions/frontmatter';
import HardBreak from '~/content_editor/extensions/hard_break';
import HTMLNodes from '~/content_editor/extensions/html_nodes';
import Heading from '~/content_editor/extensions/heading';
@@ -15,6 +16,7 @@ import Link from '~/content_editor/extensions/link';
import ListItem from '~/content_editor/extensions/list_item';
import OrderedList from '~/content_editor/extensions/ordered_list';
import Paragraph from '~/content_editor/extensions/paragraph';
+import ReferenceDefinition from '~/content_editor/extensions/reference_definition';
import Sourcemap from '~/content_editor/extensions/sourcemap';
import Strike from '~/content_editor/extensions/strike';
import Table from '~/content_editor/extensions/table';
@@ -37,6 +39,7 @@ const tiptapEditor = createTestEditor({
CodeBlockHighlight,
FootnoteDefinition,
FootnoteReference,
+ Frontmatter,
HardBreak,
Heading,
HorizontalRule,
@@ -45,6 +48,7 @@ const tiptapEditor = createTestEditor({
Link,
ListItem,
OrderedList,
+ ReferenceDefinition,
Sourcemap,
Strike,
Table,
@@ -69,6 +73,7 @@ const {
div,
footnoteDefinition,
footnoteReference,
+ frontmatter,
hardBreak,
heading,
horizontalRule,
@@ -78,6 +83,7 @@ const {
listItem,
orderedList,
pre,
+ referenceDefinition,
strike,
table,
tableRow,
@@ -96,6 +102,7 @@ const {
codeBlock: { nodeType: CodeBlockHighlight.name },
footnoteDefinition: { nodeType: FootnoteDefinition.name },
footnoteReference: { nodeType: FootnoteReference.name },
+ frontmatter: { nodeType: Frontmatter.name },
hardBreak: { nodeType: HardBreak.name },
heading: { nodeType: Heading.name },
horizontalRule: { nodeType: HorizontalRule.name },
@@ -105,6 +112,7 @@ const {
listItem: { nodeType: ListItem.name },
orderedList: { nodeType: OrderedList.name },
paragraph: { nodeType: Paragraph.name },
+ referenceDefinition: { nodeType: ReferenceDefinition.name },
strike: { nodeType: Strike.name },
table: { nodeType: Table.name },
tableCell: { nodeType: TableCell.name },
@@ -253,7 +261,12 @@ describe('Client side Markdown processing', () => {
expectedDoc: doc(
paragraph(
source('<img src="bar" alt="foo" />'),
- image({ ...source('<img src="bar" alt="foo" />'), alt: 'foo', src: 'bar' }),
+ image({
+ ...source('<img src="bar" alt="foo" />'),
+ alt: 'foo',
+ canonicalSrc: 'bar',
+ src: 'bar',
+ }),
),
),
},
@@ -271,7 +284,12 @@ describe('Client side Markdown processing', () => {
),
paragraph(
source('<img src="bar" alt="foo" />'),
- image({ ...source('<img src="bar" alt="foo" />'), alt: 'foo', src: 'bar' }),
+ image({
+ ...source('<img src="bar" alt="foo" />'),
+ alt: 'foo',
+ src: 'bar',
+ canonicalSrc: 'bar',
+ }),
),
),
},
@@ -284,6 +302,7 @@ describe('Client side Markdown processing', () => {
{
...source('[GitLab](https://gitlab.com "Go to GitLab")'),
href: 'https://gitlab.com',
+ canonicalSrc: 'https://gitlab.com',
title: 'Go to GitLab',
},
'GitLab',
@@ -302,6 +321,7 @@ describe('Client side Markdown processing', () => {
{
...source('[GitLab](https://gitlab.com "Go to GitLab")'),
href: 'https://gitlab.com',
+ canonicalSrc: 'https://gitlab.com',
title: 'Go to GitLab',
},
'GitLab',
@@ -318,6 +338,7 @@ describe('Client side Markdown processing', () => {
link(
{
...source('www.commonmark.org'),
+ canonicalSrc: 'http://www.commonmark.org',
href: 'http://www.commonmark.org',
},
'www.commonmark.org',
@@ -334,6 +355,7 @@ describe('Client side Markdown processing', () => {
link(
{
...source('www.commonmark.org/help'),
+ canonicalSrc: 'http://www.commonmark.org/help',
href: 'http://www.commonmark.org/help',
},
'www.commonmark.org/help',
@@ -351,6 +373,7 @@ describe('Client side Markdown processing', () => {
link(
{
...source('hello+xyz@mail.example'),
+ canonicalSrc: 'mailto:hello+xyz@mail.example',
href: 'mailto:hello+xyz@mail.example',
},
'hello+xyz@mail.example',
@@ -369,6 +392,7 @@ describe('Client side Markdown processing', () => {
{
sourceMapKey: null,
sourceMarkdown: null,
+ canonicalSrc: 'https://gitlab.com',
href: 'https://gitlab.com',
},
'https://gitlab.com',
@@ -398,6 +422,7 @@ hard line break`,
image({
...source('![GitLab Logo](https://gitlab.com/logo.png "GitLab Logo")'),
alt: 'GitLab Logo',
+ canonicalSrc: 'https://gitlab.com/logo.png',
src: 'https://gitlab.com/logo.png',
title: 'GitLab Logo',
}),
@@ -591,7 +616,12 @@ two
paragraph(
source('List item with an image ![bar](foo.png)'),
'List item with an image',
- image({ ...source('![bar](foo.png)'), alt: 'bar', src: 'foo.png' }),
+ image({
+ ...source('![bar](foo.png)'),
+ alt: 'bar',
+ canonicalSrc: 'foo.png',
+ src: 'foo.png',
+ }),
),
),
),
@@ -940,8 +970,17 @@ Paragraph
paragraph(
source('[![moon](moon.jpg)](/uri)'),
link(
- { ...source('[![moon](moon.jpg)](/uri)'), href: '/uri' },
- image({ ...source('![moon](moon.jpg)'), src: 'moon.jpg', alt: 'moon' }),
+ {
+ ...source('[![moon](moon.jpg)](/uri)'),
+ canonicalSrc: '/uri',
+ href: '/uri',
+ },
+ image({
+ ...source('![moon](moon.jpg)'),
+ canonicalSrc: 'moon.jpg',
+ src: 'moon.jpg',
+ alt: 'moon',
+ }),
),
),
),
@@ -971,12 +1010,26 @@ Paragraph
source('~[moon](moon.jpg) and [sun](sun.jpg)~'),
strike(
source('~[moon](moon.jpg) and [sun](sun.jpg)~'),
- link({ ...source('[moon](moon.jpg)'), href: 'moon.jpg' }, 'moon'),
+ link(
+ {
+ ...source('[moon](moon.jpg)'),
+ canonicalSrc: 'moon.jpg',
+ href: 'moon.jpg',
+ },
+ 'moon',
+ ),
),
strike(source('~[moon](moon.jpg) and [sun](sun.jpg)~'), ' and '),
strike(
source('~[moon](moon.jpg) and [sun](sun.jpg)~'),
- link({ ...source('[sun](sun.jpg)'), href: 'sun.jpg' }, 'sun'),
+ link(
+ {
+ ...source('[sun](sun.jpg)'),
+ href: 'sun.jpg',
+ canonicalSrc: 'sun.jpg',
+ },
+ 'sun',
+ ),
),
),
),
@@ -1079,6 +1132,107 @@ _world_.
),
),
},
+ {
+ markdown: `
+[GitLab][gitlab-url]
+
+[gitlab-url]: https://gitlab.com "GitLab"
+
+ `,
+ expectedDoc: doc(
+ paragraph(
+ source('[GitLab][gitlab-url]'),
+ link(
+ {
+ ...source('[GitLab][gitlab-url]'),
+ href: 'https://gitlab.com',
+ canonicalSrc: 'gitlab-url',
+ title: 'GitLab',
+ isReference: true,
+ },
+ 'GitLab',
+ ),
+ ),
+ referenceDefinition(
+ {
+ ...source('[gitlab-url]: https://gitlab.com "GitLab"'),
+ identifier: 'gitlab-url',
+ url: 'https://gitlab.com',
+ title: 'GitLab',
+ },
+ '[gitlab-url]: https://gitlab.com "GitLab"',
+ ),
+ ),
+ },
+ {
+ markdown: `
+![GitLab Logo][gitlab-logo]
+
+[gitlab-logo]: https://gitlab.com/gitlab-logo.png "GitLab Logo"
+
+ `,
+ expectedDoc: doc(
+ paragraph(
+ source('![GitLab Logo][gitlab-logo]'),
+ image({
+ ...source('![GitLab Logo][gitlab-logo]'),
+ src: 'https://gitlab.com/gitlab-logo.png',
+ canonicalSrc: 'gitlab-logo',
+ alt: 'GitLab Logo',
+ title: 'GitLab Logo',
+ isReference: true,
+ }),
+ ),
+ referenceDefinition(
+ {
+ ...source('[gitlab-logo]: https://gitlab.com/gitlab-logo.png "GitLab Logo"'),
+ identifier: 'gitlab-logo',
+ url: 'https://gitlab.com/gitlab-logo.png',
+ title: 'GitLab Logo',
+ },
+ '[gitlab-logo]: https://gitlab.com/gitlab-logo.png "GitLab Logo"',
+ ),
+ ),
+ },
+ {
+ markdown: `
+---
+title: 'layout'
+---
+ `,
+ expectedDoc: doc(
+ frontmatter(
+ { ...source("---\ntitle: 'layout'\n---"), language: 'yaml' },
+ "title: 'layout'",
+ ),
+ ),
+ },
+ {
+ markdown: `
++++
+title: 'layout'
++++
+ `,
+ expectedDoc: doc(
+ frontmatter(
+ { ...source("+++\ntitle: 'layout'\n+++"), language: 'toml' },
+ "title: 'layout'",
+ ),
+ ),
+ },
+ {
+ markdown: `
+;;;
+{ title: 'layout' }
+;;;
+ `,
+ expectedDoc: doc(
+ frontmatter(
+ { ...source(";;;\n{ title: 'layout' }\n;;;"), language: 'json' },
+ "{ title: 'layout' }",
+ ),
+ ),
+ },
];
const runOnly = examples.find((example) => example.only === true);
@@ -1090,7 +1244,7 @@ _world_.
const trimmed = markdown.trim();
const document = await deserialize(trimmed);
- expect(expectedDoc).not.toBeFalsy();
+ expect(expectedDoc).not.toBe(false);
expect(document.toJSON()).toEqual(expectedDoc.toJSON());
expect(serialize(document)).toEqual(expectedMarkdown ?? trimmed);
},
@@ -1155,4 +1309,72 @@ body {
expect(tiptapEditor.getHTML()).toEqual(expectedHtml);
},
);
+
+ describe('attribute sanitization', () => {
+ // eslint-disable-next-line no-script-url
+ const protocolBasedInjectionSimpleNoSpaces = "javascript:alert('XSS');";
+ // eslint-disable-next-line no-script-url
+ const protocolBasedInjectionSimpleSpacesBefore = "javascript: alert('XSS');";
+
+ const docWithImageFactory = (urlInput, urlOutput) => {
+ const input = `<img src="${urlInput}">`;
+
+ return {
+ input,
+ expectedDoc: doc(
+ paragraph(
+ source(input),
+ image({
+ ...source(input),
+ src: urlOutput,
+ canonicalSrc: urlOutput,
+ }),
+ ),
+ ),
+ };
+ };
+
+ const docWithLinkFactory = (urlInput, urlOutput) => {
+ const input = `<a href="${urlInput}">foo</a>`;
+
+ return {
+ input,
+ expectedDoc: doc(
+ paragraph(
+ source(input),
+ link({ ...source(input), href: urlOutput, canonicalSrc: urlOutput }, 'foo'),
+ ),
+ ),
+ };
+ };
+
+ it.each`
+ desc | urlInput | urlOutput
+ ${'protocol-based JS injection: simple, no spaces'} | ${protocolBasedInjectionSimpleNoSpaces} | ${null}
+ ${'protocol-based JS injection: simple, spaces before'} | ${"javascript :alert('XSS');"} | ${null}
+ ${'protocol-based JS injection: simple, spaces after'} | ${protocolBasedInjectionSimpleSpacesBefore} | ${null}
+ ${'protocol-based JS injection: simple, spaces before and after'} | ${"javascript : alert('XSS');"} | ${null}
+ ${'protocol-based JS injection: UTF-8 encoding'} | ${'javascript&#58;'} | ${null}
+ ${'protocol-based JS injection: long UTF-8 encoding'} | ${'javascript&#0058;'} | ${null}
+ ${'protocol-based JS injection: long UTF-8 encoding without semicolons'} | ${'&#0000106&#0000097&#0000118&#0000097&#0000115&#0000099&#0000114&#0000105&#0000112&#0000116&#0000058&#0000097&#0000108&#0000101&#0000114&#0000116&#0000040&#0000039&#0000088&#0000083&#0000083&#0000039&#0000041'} | ${null}
+ ${'protocol-based JS injection: hex encoding'} | ${'javascript&#x3A;'} | ${null}
+ ${'protocol-based JS injection: long hex encoding'} | ${'javascript&#x003A;'} | ${null}
+ ${'protocol-based JS injection: hex encoding without semicolons'} | ${'&#x6A&#x61&#x76&#x61&#x73&#x63&#x72&#x69&#x70&#x74&#x3A&#x61&#x6C&#x65&#x72&#x74&#x28&#x27&#x58&#x53&#x53&#x27&#x29'} | ${null}
+ ${'protocol-based JS injection: Unicode'} | ${"\u0001java\u0003script:alert('XSS')"} | ${null}
+ ${'protocol-based JS injection: spaces and entities'} | ${"&#14; javascript:alert('XSS');"} | ${null}
+ ${'vbscript'} | ${'vbscript:alert(document.domain)'} | ${null}
+ ${'protocol-based JS injection: preceding colon'} | ${":javascript:alert('XSS');"} | ${":javascript:alert('XSS');"}
+ ${'protocol-based JS injection: null char'} | ${"java\0script:alert('XSS')"} | ${"java�script:alert('XSS')"}
+ ${'protocol-based JS injection: invalid URL char'} | ${"java\\script:alert('XSS')"} | ${"java\\script:alert('XSS')"}
+ `('sanitizes $desc:\n\tURL "$urlInput" becomes "$urlOutput"', async ({ urlInput, urlOutput }) => {
+ const exampleFactories = [docWithImageFactory, docWithLinkFactory];
+
+ for (const exampleFactory of exampleFactories) {
+ const { input, expectedDoc } = exampleFactory(urlInput, urlOutput);
+ const document = await deserialize(input);
+
+ expect(document.toJSON()).toEqual(expectedDoc.toJSON());
+ }
+ });
+ });
});
diff --git a/spec/frontend/content_editor/render_html_and_json_for_all_examples.js b/spec/frontend/content_editor/render_html_and_json_for_all_examples.js
index 116a26cf7d5..4a57c7b1942 100644
--- a/spec/frontend/content_editor/render_html_and_json_for_all_examples.js
+++ b/spec/frontend/content_editor/render_html_and_json_for_all_examples.js
@@ -16,6 +16,7 @@ import FigureCaption from '~/content_editor/extensions/figure_caption';
import FootnoteDefinition from '~/content_editor/extensions/footnote_definition';
import FootnoteReference from '~/content_editor/extensions/footnote_reference';
import FootnotesSection from '~/content_editor/extensions/footnotes_section';
+import Frontmatter from '~/content_editor/extensions/frontmatter';
import HardBreak from '~/content_editor/extensions/hard_break';
import Heading from '~/content_editor/extensions/heading';
import HorizontalRule from '~/content_editor/extensions/horizontal_rule';
@@ -26,6 +27,7 @@ import Italic from '~/content_editor/extensions/italic';
import Link from '~/content_editor/extensions/link';
import ListItem from '~/content_editor/extensions/list_item';
import OrderedList from '~/content_editor/extensions/ordered_list';
+import ReferenceDefinition from '~/content_editor/extensions/reference_definition';
import Strike from '~/content_editor/extensions/strike';
import Table from '~/content_editor/extensions/table';
import TableCell from '~/content_editor/extensions/table_cell';
@@ -51,6 +53,7 @@ const tiptapEditor = createTestEditor({
FootnoteDefinition,
FootnoteReference,
FootnotesSection,
+ Frontmatter,
Figure,
FigureCaption,
HardBreak,
@@ -63,6 +66,7 @@ const tiptapEditor = createTestEditor({
Link,
ListItem,
OrderedList,
+ ReferenceDefinition,
Strike,
Table,
TableCell,
diff --git a/spec/frontend/content_editor/services/markdown_serializer_spec.js b/spec/frontend/content_editor/services/markdown_serializer_spec.js
index 509cda3046c..0e5281be9bf 100644
--- a/spec/frontend/content_editor/services/markdown_serializer_spec.js
+++ b/spec/frontend/content_editor/services/markdown_serializer_spec.js
@@ -24,6 +24,7 @@ import Link from '~/content_editor/extensions/link';
import ListItem from '~/content_editor/extensions/list_item';
import OrderedList from '~/content_editor/extensions/ordered_list';
import Paragraph from '~/content_editor/extensions/paragraph';
+import ReferenceDefinition from '~/content_editor/extensions/reference_definition';
import Sourcemap from '~/content_editor/extensions/sourcemap';
import Strike from '~/content_editor/extensions/strike';
import Table from '~/content_editor/extensions/table';
@@ -63,6 +64,7 @@ const tiptapEditor = createTestEditor({
Link,
ListItem,
OrderedList,
+ ReferenceDefinition,
Sourcemap,
Strike,
Table,
@@ -104,6 +106,7 @@ const {
listItem,
orderedList,
paragraph,
+ referenceDefinition,
strike,
table,
tableCell,
@@ -139,6 +142,7 @@ const {
listItem: { nodeType: ListItem.name },
orderedList: { nodeType: OrderedList.name },
paragraph: { nodeType: Paragraph.name },
+ referenceDefinition: { nodeType: ReferenceDefinition.name },
strike: { markType: Strike.name },
table: { nodeType: Table.name },
tableCell: { nodeType: TableCell.name },
@@ -243,6 +247,37 @@ describe('markdownSerializer', () => {
).toBe('[download file](file.zip "click here to download")');
});
+ it('correctly serializes link references', () => {
+ expect(
+ serialize(
+ paragraph(
+ link(
+ {
+ href: 'gitlab-url',
+ isReference: true,
+ },
+ 'GitLab',
+ ),
+ ),
+ ),
+ ).toBe('[GitLab][gitlab-url]');
+ });
+
+ it('correctly serializes image references', () => {
+ expect(
+ serialize(
+ paragraph(
+ image({
+ canonicalSrc: 'gitlab-url',
+ src: 'image.svg',
+ alt: 'GitLab',
+ isReference: true,
+ }),
+ ),
+ ),
+ ).toBe('![GitLab][gitlab-url]');
+ });
+
it('correctly serializes strikethrough', () => {
expect(serialize(paragraph(strike('deleted content')))).toBe('~~deleted content~~');
});
@@ -1163,6 +1198,38 @@ Oranges are orange [^1]
);
});
+ it('correctly serializes reference definition', () => {
+ expect(
+ serialize(
+ referenceDefinition('[gitlab]: https://gitlab.com'),
+ referenceDefinition('[foobar]: foobar.com'),
+ ),
+ ).toBe(
+ `
+[gitlab]: https://gitlab.com
+[foobar]: foobar.com`.trimLeft(),
+ );
+ });
+
+ it('correctly adds a space between a reference definition and block content', () => {
+ expect(
+ serialize(
+ paragraph('paragraph'),
+ referenceDefinition('[gitlab]: https://gitlab.com'),
+ referenceDefinition('[foobar]: foobar.com'),
+ heading({ level: 2 }, 'heading'),
+ ),
+ ).toBe(
+ `
+paragraph
+
+[gitlab]: https://gitlab.com
+[foobar]: foobar.com
+
+## heading`.trimLeft(),
+ );
+ });
+
const defaultEditAction = (initialContent) => {
tiptapEditor.chain().setContent(initialContent.toJSON()).insertContent(' modified').run();
};
@@ -1177,42 +1244,49 @@ Oranges are orange [^1]
};
it.each`
- mark | markdown | modifiedMarkdown | editAction
- ${'bold'} | ${'**bold**'} | ${'**bold modified**'} | ${defaultEditAction}
- ${'bold'} | ${'__bold__'} | ${'__bold modified__'} | ${defaultEditAction}
- ${'bold'} | ${'<strong>bold</strong>'} | ${'<strong>bold modified</strong>'} | ${defaultEditAction}
- ${'bold'} | ${'<b>bold</b>'} | ${'<b>bold modified</b>'} | ${defaultEditAction}
- ${'italic'} | ${'_italic_'} | ${'_italic modified_'} | ${defaultEditAction}
- ${'italic'} | ${'*italic*'} | ${'*italic modified*'} | ${defaultEditAction}
- ${'italic'} | ${'<em>italic</em>'} | ${'<em>italic modified</em>'} | ${defaultEditAction}
- ${'italic'} | ${'<i>italic</i>'} | ${'<i>italic modified</i>'} | ${defaultEditAction}
- ${'link'} | ${'[gitlab](https://gitlab.com)'} | ${'[gitlab modified](https://gitlab.com)'} | ${defaultEditAction}
- ${'link'} | ${'<a href="https://gitlab.com">link</a>'} | ${'<a href="https://gitlab.com">link modified</a>'} | ${defaultEditAction}
- ${'link'} | ${'link www.gitlab.com'} | ${'modified link www.gitlab.com'} | ${prependContentEditAction}
- ${'link'} | ${'link https://www.gitlab.com'} | ${'modified link https://www.gitlab.com'} | ${prependContentEditAction}
- ${'link'} | ${'link(https://www.gitlab.com)'} | ${'modified link(https://www.gitlab.com)'} | ${prependContentEditAction}
- ${'link'} | ${'link(engineering@gitlab.com)'} | ${'modified link(engineering@gitlab.com)'} | ${prependContentEditAction}
- ${'link'} | ${'link <https://www.gitlab.com>'} | ${'modified link <https://www.gitlab.com>'} | ${prependContentEditAction}
- ${'link'} | ${'link [https://www.gitlab.com>'} | ${'modified link \\[https://www.gitlab.com>'} | ${prependContentEditAction}
- ${'link'} | ${'link <https://www.gitlab.com'} | ${'modified link <https://www.gitlab.com'} | ${prependContentEditAction}
- ${'link'} | ${'link https://www.gitlab.com>'} | ${'modified link https://www.gitlab.com>'} | ${prependContentEditAction}
- ${'link'} | ${'link **https://www.gitlab.com]**'} | ${'modified link [**https://www.gitlab.com\\]**](https://www.gitlab.com%5D)'} | ${prependContentEditAction}
- ${'code'} | ${'`code`'} | ${'`code modified`'} | ${defaultEditAction}
- ${'code'} | ${'<code>code</code>'} | ${'<code>code modified</code>'} | ${defaultEditAction}
- ${'strike'} | ${'~~striked~~'} | ${'~~striked modified~~'} | ${defaultEditAction}
- ${'strike'} | ${'<del>striked</del>'} | ${'<del>striked modified</del>'} | ${defaultEditAction}
- ${'strike'} | ${'<strike>striked</strike>'} | ${'<strike>striked modified</strike>'} | ${defaultEditAction}
- ${'strike'} | ${'<s>striked</s>'} | ${'<s>striked modified</s>'} | ${defaultEditAction}
- ${'list'} | ${'- list item'} | ${'- list item modified'} | ${defaultEditAction}
- ${'list'} | ${'* list item'} | ${'* list item modified'} | ${defaultEditAction}
- ${'list'} | ${'+ list item'} | ${'+ list item modified'} | ${defaultEditAction}
- ${'list'} | ${'- list item 1\n- list item 2'} | ${'- list item 1\n- list item 2 modified'} | ${defaultEditAction}
- ${'list'} | ${'2) list item'} | ${'2) list item modified'} | ${defaultEditAction}
- ${'list'} | ${'1. list item'} | ${'1. list item modified'} | ${defaultEditAction}
- ${'taskList'} | ${'2) [ ] task list item'} | ${'2) [ ] task list item modified'} | ${defaultEditAction}
- ${'taskList'} | ${'2) [x] task list item'} | ${'2) [x] task list item modified'} | ${defaultEditAction}
+ mark | markdown | modifiedMarkdown | editAction
+ ${'bold'} | ${'**bold**'} | ${'**bold modified**'} | ${defaultEditAction}
+ ${'bold'} | ${'__bold__'} | ${'__bold modified__'} | ${defaultEditAction}
+ ${'bold'} | ${'<strong>bold</strong>'} | ${'<strong>bold modified</strong>'} | ${defaultEditAction}
+ ${'bold'} | ${'<b>bold</b>'} | ${'<b>bold modified</b>'} | ${defaultEditAction}
+ ${'italic'} | ${'_italic_'} | ${'_italic modified_'} | ${defaultEditAction}
+ ${'italic'} | ${'*italic*'} | ${'*italic modified*'} | ${defaultEditAction}
+ ${'italic'} | ${'<em>italic</em>'} | ${'<em>italic modified</em>'} | ${defaultEditAction}
+ ${'italic'} | ${'<i>italic</i>'} | ${'<i>italic modified</i>'} | ${defaultEditAction}
+ ${'link'} | ${'[gitlab](https://gitlab.com)'} | ${'[gitlab modified](https://gitlab.com)'} | ${defaultEditAction}
+ ${'link'} | ${'<a href="https://gitlab.com">link</a>'} | ${'<a href="https://gitlab.com">link modified</a>'} | ${defaultEditAction}
+ ${'link'} | ${'link www.gitlab.com'} | ${'modified link www.gitlab.com'} | ${prependContentEditAction}
+ ${'link'} | ${'link https://www.gitlab.com'} | ${'modified link https://www.gitlab.com'} | ${prependContentEditAction}
+ ${'link'} | ${'link(https://www.gitlab.com)'} | ${'modified link(https://www.gitlab.com)'} | ${prependContentEditAction}
+ ${'link'} | ${'link(engineering@gitlab.com)'} | ${'modified link(engineering@gitlab.com)'} | ${prependContentEditAction}
+ ${'link'} | ${'link <https://www.gitlab.com>'} | ${'modified link <https://www.gitlab.com>'} | ${prependContentEditAction}
+ ${'link'} | ${'link [https://www.gitlab.com>'} | ${'modified link \\[https://www.gitlab.com>'} | ${prependContentEditAction}
+ ${'link'} | ${'link <https://www.gitlab.com'} | ${'modified link <https://www.gitlab.com'} | ${prependContentEditAction}
+ ${'link'} | ${'link https://www.gitlab.com>'} | ${'modified link https://www.gitlab.com>'} | ${prependContentEditAction}
+ ${'link'} | ${'link https://www.gitlab.com/path'} | ${'modified link https://www.gitlab.com/path'} | ${prependContentEditAction}
+ ${'link'} | ${'link https://www.gitlab.com?query=search'} | ${'modified link https://www.gitlab.com?query=search'} | ${prependContentEditAction}
+ ${'link'} | ${'link https://www.gitlab.com/#fragment'} | ${'modified link https://www.gitlab.com/#fragment'} | ${prependContentEditAction}
+ ${'link'} | ${'link https://www.gitlab.com/?query=search'} | ${'modified link https://www.gitlab.com/?query=search'} | ${prependContentEditAction}
+ ${'link'} | ${'link https://www.gitlab.com#fragment'} | ${'modified link https://www.gitlab.com#fragment'} | ${prependContentEditAction}
+ ${'link'} | ${'link **https://www.gitlab.com]**'} | ${'modified link **https://www.gitlab.com\\]**'} | ${prependContentEditAction}
+ ${'code'} | ${'`code`'} | ${'`code modified`'} | ${defaultEditAction}
+ ${'code'} | ${'<code>code</code>'} | ${'<code>code modified</code>'} | ${defaultEditAction}
+ ${'strike'} | ${'~~striked~~'} | ${'~~striked modified~~'} | ${defaultEditAction}
+ ${'strike'} | ${'<del>striked</del>'} | ${'<del>striked modified</del>'} | ${defaultEditAction}
+ ${'strike'} | ${'<strike>striked</strike>'} | ${'<strike>striked modified</strike>'} | ${defaultEditAction}
+ ${'strike'} | ${'<s>striked</s>'} | ${'<s>striked modified</s>'} | ${defaultEditAction}
+ ${'list'} | ${'- list item'} | ${'- list item modified'} | ${defaultEditAction}
+ ${'list'} | ${'* list item'} | ${'* list item modified'} | ${defaultEditAction}
+ ${'list'} | ${'+ list item'} | ${'+ list item modified'} | ${defaultEditAction}
+ ${'list'} | ${'- list item 1\n- list item 2'} | ${'- list item 1\n- list item 2 modified'} | ${defaultEditAction}
+ ${'list'} | ${'2) list item'} | ${'2) list item modified'} | ${defaultEditAction}
+ ${'list'} | ${'1. list item'} | ${'1. list item modified'} | ${defaultEditAction}
+ ${'taskList'} | ${'2) [ ] task list item'} | ${'2) [ ] task list item modified'} | ${defaultEditAction}
+ ${'taskList'} | ${'2) [x] task list item'} | ${'2) [x] task list item modified'} | ${defaultEditAction}
+ ${'image'} | ${'![image](image.png)'} | ${'![image](image.png) modified'} | ${defaultEditAction}
+ ${'footnoteReference'} | ${'[^1] footnote\n\n[^1]: footnote definition'} | ${'modified [^1] footnote\n\n[^1]: footnote definition'} | ${prependContentEditAction}
`(
- 'preserves original $mark syntax when sourceMarkdown is available for $content',
+ 'preserves original $mark syntax when sourceMarkdown is available for $markdown',
async ({ markdown, modifiedMarkdown, editAction }) => {
const { document } = await remarkMarkdownDeserializer().deserialize({
schema: tiptapEditor.schema,
diff --git a/spec/frontend/content_editor/services/table_of_contents_utils_spec.js b/spec/frontend/content_editor/services/table_of_contents_utils_spec.js
new file mode 100644
index 00000000000..7f63c2171c2
--- /dev/null
+++ b/spec/frontend/content_editor/services/table_of_contents_utils_spec.js
@@ -0,0 +1,96 @@
+import Heading from '~/content_editor/extensions/heading';
+import { toTree, getHeadings } from '~/content_editor/services/table_of_contents_utils';
+import { createTestEditor, createDocBuilder } from '../test_utils';
+
+describe('content_editor/services/table_of_contents_utils', () => {
+ describe('toTree', () => {
+ it('should fill in gaps in heading levels and convert headings to a tree', () => {
+ expect(
+ toTree([
+ { level: 3, text: '3' },
+ { level: 2, text: '2' },
+ ]),
+ ).toEqual([
+ expect.objectContaining({
+ level: 1,
+ text: '',
+ subHeadings: [
+ expect.objectContaining({
+ level: 2,
+ text: '',
+ subHeadings: [expect.objectContaining({ level: 3, text: '3', subHeadings: [] })],
+ }),
+ expect.objectContaining({ level: 2, text: '2', subHeadings: [] }),
+ ],
+ }),
+ ]);
+ });
+ });
+
+ describe('getHeadings', () => {
+ const tiptapEditor = createTestEditor({
+ extensions: [Heading],
+ });
+
+ const {
+ builders: { heading, doc },
+ } = createDocBuilder({
+ tiptapEditor,
+ names: {
+ heading: { nodeType: Heading.name },
+ },
+ });
+
+ it('gets all headings as a tree in a tiptap document', () => {
+ const initialDoc = doc(
+ heading({ level: 1 }, 'Heading 1'),
+ heading({ level: 2 }, 'Heading 1.1'),
+ heading({ level: 3 }, 'Heading 1.1.1'),
+ heading({ level: 2 }, 'Heading 1.2'),
+ heading({ level: 3 }, 'Heading 1.2.1'),
+ heading({ level: 2 }, 'Heading 1.3'),
+ heading({ level: 2 }, 'Heading 1.4'),
+ heading({ level: 3 }, 'Heading 1.4.1'),
+ heading({ level: 1 }, 'Heading 2'),
+ );
+
+ tiptapEditor.commands.setContent(initialDoc.toJSON());
+
+ expect(getHeadings(tiptapEditor)).toEqual([
+ expect.objectContaining({
+ level: 1,
+ text: 'Heading 1',
+ subHeadings: [
+ expect.objectContaining({
+ level: 2,
+ text: 'Heading 1.1',
+ subHeadings: [
+ expect.objectContaining({ level: 3, text: 'Heading 1.1.1', subHeadings: [] }),
+ ],
+ }),
+ expect.objectContaining({
+ level: 2,
+ text: 'Heading 1.2',
+ subHeadings: [
+ expect.objectContaining({ level: 3, text: 'Heading 1.2.1', subHeadings: [] }),
+ ],
+ }),
+ expect.objectContaining({ level: 2, text: 'Heading 1.3', subHeadings: [] }),
+ expect.objectContaining({
+ level: 2,
+ text: 'Heading 1.4',
+ subHeadings: [
+ expect.objectContaining({ level: 3, text: 'Heading 1.4.1', subHeadings: [] }),
+ ],
+ }),
+ ],
+ }),
+ expect.objectContaining({
+ level: 1,
+ text: 'Heading 2',
+ subHeadings: [],
+ }),
+ ]);
+ });
+ });
+});
diff --git a/spec/frontend/crm/contact_form_wrapper_spec.js b/spec/frontend/crm/contact_form_wrapper_spec.js
index 5e1743701e4..e49b553e4b5 100644
--- a/spec/frontend/crm/contact_form_wrapper_spec.js
+++ b/spec/frontend/crm/contact_form_wrapper_spec.js
@@ -56,8 +56,9 @@ describe('Customer relations contact form wrapper', () => {
${'edit'} | ${'Edit contact'} | ${'Contact has been updated.'} | ${updateContactMutation} | ${contacts[0].id}
${'create'} | ${'New contact'} | ${'Contact has been added.'} | ${createContactMutation} | ${null}
`('in $mode mode', ({ mode, title, successMessage, mutation, existingId }) => {
+ const isEditMode = mode === 'edit';
+
beforeEach(() => {
- const isEditMode = mode === 'edit';
mountComponent({ isEditMode });
return waitForPromises();
@@ -82,7 +83,7 @@ describe('Customer relations contact form wrapper', () => {
});
it('renders correct fields prop', () => {
- expect(findContactForm().props('fields')).toEqual([
+ const fields = [
{ name: 'firstName', label: 'First name', required: true },
{ name: 'lastName', label: 'Last name', required: true },
{ name: 'email', label: 'Email', required: true },
@@ -98,7 +99,9 @@ describe('Customer relations contact form wrapper', () => {
],
},
{ name: 'description', label: 'Description' },
- ]);
+ ];
+ if (isEditMode) fields.push({ name: 'active', label: 'Active', required: true, bool: true });
+ expect(findContactForm().props('fields')).toEqual(fields);
});
it('renders correct title prop', () => {
diff --git a/spec/frontend/crm/contacts_root_spec.js b/spec/frontend/crm/contacts_root_spec.js
index 3a6989a00f1..7aaaf480c44 100644
--- a/spec/frontend/crm/contacts_root_spec.js
+++ b/spec/frontend/crm/contacts_root_spec.js
@@ -1,14 +1,16 @@
-import { GlAlert, GlLoadingIcon } from '@gitlab/ui';
+import { GlLoadingIcon } from '@gitlab/ui';
import Vue from 'vue';
import VueApollo from 'vue-apollo';
import VueRouter from 'vue-router';
-import { mountExtended, shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import { mountExtended } from 'helpers/vue_test_utils_helper';
import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
import ContactsRoot from '~/crm/contacts/components/contacts_root.vue';
import getGroupContactsQuery from '~/crm/contacts/components/graphql/get_group_contacts.query.graphql';
+import getGroupContactsCountByStateQuery from '~/crm/contacts/components/graphql/get_group_contacts_count_by_state.graphql';
import routes from '~/crm/contacts/routes';
-import { getGroupContactsQueryResponse } from './mock_data';
+import PaginatedTableWithSearchAndTabs from '~/vue_shared/components/paginated_table_with_search_and_tabs/paginated_table_with_search_and_tabs.vue';
+import { getGroupContactsQueryResponse, getGroupContactsCountQueryResponse } from './mock_data';
describe('Customer relations contacts root app', () => {
Vue.use(VueApollo);
@@ -21,24 +23,30 @@ describe('Customer relations contacts root app', () => {
const findRowByName = (rowName) => wrapper.findAllByRole('row', { name: rowName });
const findIssuesLinks = () => wrapper.findAllByTestId('issues-link');
const findNewContactButton = () => wrapper.findByTestId('new-contact-button');
- const findError = () => wrapper.findComponent(GlAlert);
+ const findTable = () => wrapper.findComponent(PaginatedTableWithSearchAndTabs);
const successQueryHandler = jest.fn().mockResolvedValue(getGroupContactsQueryResponse);
+ const successCountQueryHandler = jest.fn().mockResolvedValue(getGroupContactsCountQueryResponse);
const basePath = '/groups/flightjs/-/crm/contacts';
const mountComponent = ({
queryHandler = successQueryHandler,
- mountFunction = shallowMountExtended,
+ countQueryHandler = successCountQueryHandler,
canAdminCrmContact = true,
+ textQuery = null,
} = {}) => {
- fakeApollo = createMockApollo([[getGroupContactsQuery, queryHandler]]);
- wrapper = mountFunction(ContactsRoot, {
+ fakeApollo = createMockApollo([
+ [getGroupContactsQuery, queryHandler],
+ [getGroupContactsCountByStateQuery, countQueryHandler],
+ ]);
+ wrapper = mountExtended(ContactsRoot, {
router,
provide: {
groupFullPath: 'flightjs',
groupId: 26,
groupIssuesPath: '/issues',
canAdminCrmContact,
+ textQuery,
},
apolloProvider: fakeApollo,
});
@@ -58,9 +66,33 @@ describe('Customer relations contacts root app', () => {
router = null;
});
- it('should render loading spinner', () => {
+ it('should render table with default props and loading state', () => {
mountComponent();
+ expect(findTable().props()).toMatchObject({
+ items: [],
+ itemsCount: {},
+ pageInfo: {},
+ statusTabs: [
+ { title: 'Active', status: 'ACTIVE', filters: 'active' },
+ { title: 'Inactive', status: 'INACTIVE', filters: 'inactive' },
+ { title: 'All', status: 'ALL', filters: 'all' },
+ ],
+ showItems: true,
+ showErrorMsg: false,
+ trackViewsOptions: { category: 'Customer Relations', action: 'view_contacts_list' },
+ i18n: {
+ emptyText: 'No contacts found',
+ issuesButtonLabel: 'View issues',
+ editButtonLabel: 'Edit',
+ title: 'Customer relations contacts',
+ newContact: 'New contact',
+ errorText: 'Something went wrong. Please try again.',
+ },
+ serverErrorMessage: '',
+ filterSearchKey: 'contacts',
+ filterSearchTokens: [],
+ });
expect(findLoadingIcon().exists()).toBe(true);
});
@@ -83,7 +115,7 @@ describe('Customer relations contacts root app', () => {
mountComponent({ queryHandler: jest.fn().mockRejectedValue('ERROR') });
await waitForPromises();
- expect(findError().exists()).toBe(true);
+ expect(wrapper.text()).toContain('Something went wrong. Please try again.');
});
});
@@ -92,11 +124,11 @@ describe('Customer relations contacts root app', () => {
mountComponent();
await waitForPromises();
- expect(findError().exists()).toBe(false);
+ expect(wrapper.text()).not.toContain('Something went wrong. Please try again.');
});
it('renders correct results', async () => {
- mountComponent({ mountFunction: mountExtended });
+ mountComponent();
await waitForPromises();
expect(findRowByName(/Marty/i)).toHaveLength(1);
@@ -105,7 +137,7 @@ describe('Customer relations contacts root app', () => {
const issueLink = findIssuesLinks().at(0);
expect(issueLink.exists()).toBe(true);
- expect(issueLink.attributes('href')).toBe('/issues?crm_contact_id=16');
+ expect(issueLink.attributes('href')).toBe('/issues?crm_contact_id=12');
});
});
});
diff --git a/spec/frontend/crm/form_spec.js b/spec/frontend/crm/form_spec.js
index d39f0795f5f..f0e9150cada 100644
--- a/spec/frontend/crm/form_spec.js
+++ b/spec/frontend/crm/form_spec.js
@@ -1,4 +1,4 @@
-import { GlAlert, GlFormInput, GlFormSelect, GlFormGroup } from '@gitlab/ui';
+import { GlAlert, GlFormCheckbox, GlFormInput, GlFormSelect, GlFormGroup } from '@gitlab/ui';
import Vue from 'vue';
import VueApollo from 'vue-apollo';
import VueRouter from 'vue-router';
@@ -78,6 +78,7 @@ describe('Reusable form component', () => {
const findSaveButton = () => wrapper.findByTestId('save-button');
const findForm = () => wrapper.find('form');
const findError = () => wrapper.findComponent(GlAlert);
+ const findFormGroup = (at) => wrapper.findAllComponents(GlFormGroup).at(at);
const mountComponent = (propsData) => {
wrapper = shallowMountExtended(Form, {
@@ -92,7 +93,7 @@ describe('Reusable form component', () => {
});
};
- const mountContact = ({ propsData } = {}) => {
+ const mountContact = ({ propsData, extraFields = [] } = {}) => {
mountComponent({
fields: [
{ name: 'firstName', label: 'First name', required: true },
@@ -108,6 +109,7 @@ describe('Reusable form component', () => {
{ key: 'gid://gitlab/CustomerRelations::Organization/2', value: 'ABC Corp' },
],
},
+ ...extraFields,
],
getQuery: {
query: getGroupContactsQuery,
@@ -136,7 +138,8 @@ describe('Reusable form component', () => {
mutation: updateContactMutation,
existingId: 'gid://gitlab/CustomerRelations::Contact/12',
};
- mountContact({ propsData });
+ const extraFields = [{ name: 'active', label: 'Active', required: true, bool: true }];
+ mountContact({ propsData, extraFields });
};
const mountOrganization = ({ propsData } = {}) => {
@@ -285,18 +288,16 @@ describe('Reusable form component', () => {
});
it.each`
- index | id | componentName | value
- ${0} | ${'firstName'} | ${'GlFormInput'} | ${'Marty'}
- ${1} | ${'lastName'} | ${'GlFormInput'} | ${'McFly'}
- ${2} | ${'email'} | ${'GlFormInput'} | ${'example@gitlab.com'}
- ${4} | ${'description'} | ${'GlFormInput'} | ${undefined}
- ${3} | ${'phone'} | ${'GlFormInput'} | ${undefined}
- ${5} | ${'organizationId'} | ${'GlFormSelect'} | ${'gid://gitlab/CustomerRelations::Organization/2'}
+ index | id | component | value
+ ${0} | ${'firstName'} | ${GlFormInput} | ${'Marty'}
+ ${1} | ${'lastName'} | ${GlFormInput} | ${'McFly'}
+ ${2} | ${'email'} | ${GlFormInput} | ${'example@gitlab.com'}
+ ${4} | ${'description'} | ${GlFormInput} | ${undefined}
+ ${3} | ${'phone'} | ${GlFormInput} | ${undefined}
+ ${5} | ${'organizationId'} | ${GlFormSelect} | ${'gid://gitlab/CustomerRelations::Organization/2'}
`(
- 'should render a $componentName for #$id with the value "$value"',
- ({ index, id, componentName, value }) => {
- const component = componentName === 'GlFormInput' ? GlFormInput : GlFormSelect;
- const findFormGroup = (at) => wrapper.findAllComponents(GlFormGroup).at(at);
+ 'should render the correct component for #$id with the value "$value"',
+ ({ index, id, component, value }) => {
const findFormElement = () => findFormGroup(index).find(component);
expect(findFormElement().attributes('id')).toBe(id);
@@ -304,6 +305,14 @@ describe('Reusable form component', () => {
},
);
+ it('should render a checked GlFormCheckbox for #active', () => {
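+ // "active" is appended after the six default contact fields, so its form group is at index 6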
+ const activeCheckboxIndex = 6;
+ const findFormElement = () => findFormGroup(activeCheckboxIndex).find(GlFormCheckbox);
+
+ expect(findFormElement().attributes('id')).toBe('active');
+ expect(findFormElement().attributes('checked')).toBe('true');
+ });
+
it('should include updated values in update mutation', () => {
wrapper.find('#firstName').vm.$emit('input', 'Michael');
wrapper
@@ -314,6 +323,7 @@ describe('Reusable form component', () => {
expect(handler).toHaveBeenCalledWith('updateContact', {
input: {
+ active: true,
description: null,
email: 'example@gitlab.com',
firstName: 'Michael',
diff --git a/spec/frontend/crm/mock_data.js b/spec/frontend/crm/mock_data.js
index 35bc7fb69b4..a2e2e88ac60 100644
--- a/spec/frontend/crm/mock_data.js
+++ b/spec/frontend/crm/mock_data.js
@@ -13,6 +13,7 @@ export const getGroupContactsQueryResponse = {
email: 'example@gitlab.com',
phone: null,
description: null,
+ active: true,
organization: {
__typename: 'CustomerRelationsOrganization',
id: 'gid://gitlab/CustomerRelations::Organization/2',
@@ -27,6 +28,7 @@ export const getGroupContactsQueryResponse = {
email: null,
phone: null,
description: null,
+ active: true,
organization: null,
},
{
@@ -37,9 +39,32 @@ export const getGroupContactsQueryResponse = {
email: 'jd@gitlab.com',
phone: '+44 44 4444 4444',
description: 'Vice President',
+ active: true,
organization: null,
},
],
+ pageInfo: {
+ __typename: 'PageInfo',
+ hasNextPage: false,
+ endCursor: 'eyJsYXN0X25hbWUiOiJMZWRuZXIiLCJpZCI6IjE3OSJ9',
+ hasPreviousPage: false,
+ startCursor: 'eyJsYXN0X25hbWUiOiJCYXJ0b24iLCJpZCI6IjE5MyJ9',
+ },
+ },
+ },
+ },
+};
+
+export const getGroupContactsCountQueryResponse = {
+ data: {
+ group: {
+ __typename: 'Group',
+ id: 'gid://gitlab/Group/26',
+ contactStateCounts: {
+ all: 241,
+ active: 239,
+ inactive: 2,
+ __typename: 'ContactStateCountsType',
},
},
},
@@ -58,6 +83,7 @@ export const getGroupOrganizationsQueryResponse = {
name: 'Test Inc',
defaultRate: 100,
description: null,
+ active: true,
},
{
__typename: 'CustomerRelationsOrganization',
@@ -65,6 +91,7 @@ export const getGroupOrganizationsQueryResponse = {
name: 'ABC Company',
defaultRate: 110,
description: 'VIP',
+ active: true,
},
{
__typename: 'CustomerRelationsOrganization',
@@ -72,6 +99,7 @@ export const getGroupOrganizationsQueryResponse = {
name: 'GitLab',
defaultRate: 120,
description: null,
+ active: true,
},
],
},
@@ -91,6 +119,7 @@ export const createContactMutationResponse = {
phone: null,
description: null,
organization: null,
+ active: true,
},
errors: [],
},
@@ -119,6 +148,7 @@ export const updateContactMutationResponse = {
phone: null,
description: null,
organization: null,
+ active: true,
},
errors: [],
},
@@ -143,6 +173,7 @@ export const createOrganizationMutationResponse = {
name: 'A',
defaultRate: null,
description: null,
+ active: true,
},
errors: [],
},
@@ -168,6 +199,7 @@ export const updateOrganizationMutationResponse = {
name: 'A',
defaultRate: null,
description: null,
+ active: true,
},
errors: [],
},
diff --git a/spec/frontend/crm/organization_form_wrapper_spec.js b/spec/frontend/crm/organization_form_wrapper_spec.js
index 1a5a7c6ca5d..9f26b9157e6 100644
--- a/spec/frontend/crm/organization_form_wrapper_spec.js
+++ b/spec/frontend/crm/organization_form_wrapper_spec.js
@@ -49,7 +49,7 @@ describe('Customer relations organization form wrapper', () => {
mountComponent({ isEditMode: true });
const organizationForm = findOrganizationForm();
- expect(organizationForm.props('fields')).toHaveLength(3);
+ expect(organizationForm.props('fields')).toHaveLength(4);
expect(organizationForm.props('title')).toBe('Edit organization');
expect(organizationForm.props('successMessage')).toBe('Organization has been updated.');
expect(organizationForm.props('mutation')).toBe(updateOrganizationMutation);
diff --git a/spec/frontend/cycle_analytics/base_spec.js b/spec/frontend/cycle_analytics/base_spec.js
index 7b1ef71da63..ea3da86c7b2 100644
--- a/spec/frontend/cycle_analytics/base_spec.js
+++ b/spec/frontend/cycle_analytics/base_spec.js
@@ -11,7 +11,6 @@ import ValueStreamFilters from '~/cycle_analytics/components/value_stream_filter
import { NOT_ENOUGH_DATA_ERROR } from '~/cycle_analytics/constants';
import initState from '~/cycle_analytics/store/state';
import {
- permissions,
transformedProjectStagePathData,
selectedStage,
issueEvents,
@@ -34,7 +33,6 @@ let wrapper;
const { id: groupId, path: groupPath } = currentGroup;
const defaultState = {
- permissions,
currentGroup,
createdBefore,
createdAfter,
@@ -240,24 +238,6 @@ describe('Value stream analytics component', () => {
});
});
- describe('without enough permissions', () => {
- beforeEach(() => {
- wrapper = createComponent({
- initialState: {
- selectedStage,
- permissions: {
- ...permissions,
- [selectedStage.id]: false,
- },
- },
- });
- });
-
- it('renders the empty stage with `You need permission.` message', () => {
- expect(findEmptyStageTitle()).toBe('You need permission.');
- });
- });
-
describe('without a selected stage', () => {
beforeEach(() => {
wrapper = createComponent({
diff --git a/spec/frontend/cycle_analytics/mock_data.js b/spec/frontend/cycle_analytics/mock_data.js
index 1fe1dbbb75c..02666260cdb 100644
--- a/spec/frontend/cycle_analytics/mock_data.js
+++ b/spec/frontend/cycle_analytics/mock_data.js
@@ -101,30 +101,12 @@ export const selectedStage = {
...issueStage,
value: null,
active: false,
- isUserAllowed: true,
emptyStageText:
'The issue stage shows the time it takes from creating an issue to assigning the issue to a milestone, or add the issue to a list on your Issue Board. Begin creating issues to see data for this stage.',
slug: 'issue',
};
-export const stats = [issueStage, planStage, codeStage, testStage, reviewStage, stagingStage];
-
-export const permissions = {
- issue: true,
- plan: true,
- code: true,
- test: true,
- review: true,
- staging: true,
-};
-
-export const rawData = {
- summary,
- stats,
- permissions,
-};
-
export const convertedData = {
summary: [
{ value: '20', title: 'New Issues' },
diff --git a/spec/frontend/cycle_analytics/store/actions_spec.js b/spec/frontend/cycle_analytics/store/actions_spec.js
index e775e941b4c..94b6de85a5c 100644
--- a/spec/frontend/cycle_analytics/store/actions_spec.js
+++ b/spec/frontend/cycle_analytics/store/actions_spec.js
@@ -153,6 +153,19 @@ describe('Project Value Stream Analytics actions', () => {
});
});
});
+
+ describe('with no value stream stages available', () => {
+ it('commits the SET_NO_ACCESS_ERROR mutation', () => {
+ state = { ...state, stages: [] };
+ testAction({
+ action: actions.setInitialStage,
+ state,
+ payload: null,
+ expectedMutations: [{ type: 'SET_NO_ACCESS_ERROR' }],
+ expectedActions: [],
+ });
+ });
+ });
});
describe('updateStageTablePagination', () => {
@@ -170,46 +183,6 @@ describe('Project Value Stream Analytics actions', () => {
});
});
- describe('fetchCycleAnalyticsData', () => {
- beforeEach(() => {
- state = { ...defaultState, endpoints: mockEndpoints };
- mock = new MockAdapter(axios);
- mock.onGet(mockRequestPath).reply(httpStatusCodes.OK);
- });
-
- it(`dispatches the 'setSelectedStage' and 'fetchStageData' actions`, () =>
- testAction({
- action: actions.fetchCycleAnalyticsData,
- state,
- payload: {},
- expectedMutations: [
- { type: 'REQUEST_CYCLE_ANALYTICS_DATA' },
- { type: 'RECEIVE_CYCLE_ANALYTICS_DATA_SUCCESS' },
- ],
- expectedActions: [],
- }));
-
- describe('with a failing request', () => {
- beforeEach(() => {
- state = { endpoints: mockEndpoints };
- mock = new MockAdapter(axios);
- mock.onGet(mockRequestPath).reply(httpStatusCodes.BAD_REQUEST);
- });
-
- it(`commits the 'RECEIVE_CYCLE_ANALYTICS_DATA_ERROR' mutation`, () =>
- testAction({
- action: actions.fetchCycleAnalyticsData,
- state,
- payload: {},
- expectedMutations: [
- { type: 'REQUEST_CYCLE_ANALYTICS_DATA' },
- { type: 'RECEIVE_CYCLE_ANALYTICS_DATA_ERROR' },
- ],
- expectedActions: [],
- }));
- });
- });
-
describe('fetchStageData', () => {
const mockStagePath = /value_streams\/\w+\/stages\/\w+\/records/;
const headers = {
@@ -529,14 +502,13 @@ describe('Project Value Stream Analytics actions', () => {
});
describe('fetchValueStreamStageData', () => {
- it('will dispatch the fetchCycleAnalyticsData, fetchStageData, fetchStageMedians and fetchStageCountValues actions', () =>
+ it('will dispatch the fetchStageData, fetchStageMedians and fetchStageCountValues actions', () =>
testAction({
action: actions.fetchValueStreamStageData,
state,
payload: {},
expectedMutations: [],
expectedActions: [
- { type: 'fetchCycleAnalyticsData' },
{ type: 'fetchStageData' },
{ type: 'fetchStageMedians' },
{ type: 'fetchStageCountValues' },
diff --git a/spec/frontend/cycle_analytics/store/mutations_spec.js b/spec/frontend/cycle_analytics/store/mutations_spec.js
index 2670a390e9c..2e9e5d91471 100644
--- a/spec/frontend/cycle_analytics/store/mutations_spec.js
+++ b/spec/frontend/cycle_analytics/store/mutations_spec.js
@@ -38,31 +38,24 @@ describe('Project Value Stream Analytics mutations', () => {
});
it.each`
- mutation | stateKey | value
- ${types.REQUEST_VALUE_STREAMS} | ${'valueStreams'} | ${[]}
- ${types.RECEIVE_VALUE_STREAMS_ERROR} | ${'valueStreams'} | ${[]}
- ${types.REQUEST_VALUE_STREAM_STAGES} | ${'stages'} | ${[]}
- ${types.RECEIVE_VALUE_STREAM_STAGES_ERROR} | ${'stages'} | ${[]}
- ${types.REQUEST_CYCLE_ANALYTICS_DATA} | ${'isLoading'} | ${true}
- ${types.REQUEST_CYCLE_ANALYTICS_DATA} | ${'hasError'} | ${false}
- ${types.RECEIVE_CYCLE_ANALYTICS_DATA_SUCCESS} | ${'hasError'} | ${false}
- ${types.RECEIVE_CYCLE_ANALYTICS_DATA_ERROR} | ${'isLoading'} | ${false}
- ${types.RECEIVE_CYCLE_ANALYTICS_DATA_ERROR} | ${'hasError'} | ${true}
- ${types.REQUEST_STAGE_DATA} | ${'isLoadingStage'} | ${true}
- ${types.REQUEST_STAGE_DATA} | ${'isEmptyStage'} | ${false}
- ${types.REQUEST_STAGE_DATA} | ${'hasError'} | ${false}
- ${types.REQUEST_STAGE_DATA} | ${'selectedStageEvents'} | ${[]}
- ${types.RECEIVE_STAGE_DATA_SUCCESS} | ${'isLoadingStage'} | ${false}
- ${types.RECEIVE_STAGE_DATA_SUCCESS} | ${'selectedStageEvents'} | ${[]}
- ${types.RECEIVE_STAGE_DATA_SUCCESS} | ${'hasError'} | ${false}
- ${types.RECEIVE_STAGE_DATA_ERROR} | ${'isLoadingStage'} | ${false}
- ${types.RECEIVE_STAGE_DATA_ERROR} | ${'selectedStageEvents'} | ${[]}
- ${types.RECEIVE_STAGE_DATA_ERROR} | ${'hasError'} | ${true}
- ${types.RECEIVE_STAGE_DATA_ERROR} | ${'isEmptyStage'} | ${true}
- ${types.REQUEST_STAGE_MEDIANS} | ${'medians'} | ${{}}
- ${types.RECEIVE_STAGE_MEDIANS_ERROR} | ${'medians'} | ${{}}
- ${types.REQUEST_STAGE_COUNTS} | ${'stageCounts'} | ${{}}
- ${types.RECEIVE_STAGE_COUNTS_ERROR} | ${'stageCounts'} | ${{}}
+ mutation | stateKey | value
+ ${types.REQUEST_VALUE_STREAMS} | ${'valueStreams'} | ${[]}
+ ${types.RECEIVE_VALUE_STREAMS_ERROR} | ${'valueStreams'} | ${[]}
+ ${types.REQUEST_VALUE_STREAM_STAGES} | ${'stages'} | ${[]}
+ ${types.RECEIVE_VALUE_STREAM_STAGES_ERROR} | ${'stages'} | ${[]}
+ ${types.REQUEST_STAGE_DATA} | ${'isLoadingStage'} | ${true}
+ ${types.REQUEST_STAGE_DATA} | ${'isEmptyStage'} | ${false}
+ ${types.REQUEST_STAGE_DATA} | ${'selectedStageEvents'} | ${[]}
+ ${types.RECEIVE_STAGE_DATA_SUCCESS} | ${'isLoadingStage'} | ${false}
+ ${types.RECEIVE_STAGE_DATA_SUCCESS} | ${'selectedStageEvents'} | ${[]}
+ ${types.RECEIVE_STAGE_DATA_ERROR} | ${'isLoadingStage'} | ${false}
+ ${types.RECEIVE_STAGE_DATA_ERROR} | ${'selectedStageEvents'} | ${[]}
+ ${types.RECEIVE_STAGE_DATA_ERROR} | ${'isEmptyStage'} | ${true}
+ ${types.REQUEST_STAGE_MEDIANS} | ${'medians'} | ${{}}
+ ${types.RECEIVE_STAGE_MEDIANS_ERROR} | ${'medians'} | ${{}}
+ ${types.REQUEST_STAGE_COUNTS} | ${'stageCounts'} | ${{}}
+ ${types.RECEIVE_STAGE_COUNTS_ERROR} | ${'stageCounts'} | ${{}}
+ ${types.SET_NO_ACCESS_ERROR} | ${'hasNoAccessError'} | ${true}
`('$mutation will set $stateKey to $value', ({ mutation, stateKey, value }) => {
mutations[mutation](state);
diff --git a/spec/frontend/design_management/components/delete_button_spec.js b/spec/frontend/design_management/components/delete_button_spec.js
index e3907fdbe15..cee1eec792d 100644
--- a/spec/frontend/design_management/components/delete_button_spec.js
+++ b/spec/frontend/design_management/components/delete_button_spec.js
@@ -6,8 +6,8 @@ import BatchDeleteButton from '~/design_management/components/delete_button.vue'
describe('Batch delete button component', () => {
let wrapper;
- const findButton = () => wrapper.find(GlButton);
- const findModal = () => wrapper.find(GlModal);
+ const findButton = () => wrapper.findComponent(GlButton);
+ const findModal = () => wrapper.findComponent(GlModal);
function createComponent({ isDeleting = false } = {}, { slots = {} } = {}) {
wrapper = shallowMount(BatchDeleteButton, {
diff --git a/spec/frontend/design_management/components/design_notes/design_discussion_spec.js b/spec/frontend/design_management/components/design_notes/design_discussion_spec.js
index 77935fbde11..2091e1e08dd 100644
--- a/spec/frontend/design_management/components/design_notes/design_discussion_spec.js
+++ b/spec/frontend/design_management/components/design_notes/design_discussion_spec.js
@@ -26,13 +26,13 @@ describe('Design discussions component', () => {
const originalGon = window.gon;
let wrapper;
- const findDesignNotes = () => wrapper.findAll(DesignNote);
- const findReplyPlaceholder = () => wrapper.find(ReplyPlaceholder);
- const findReplyForm = () => wrapper.find(DesignReplyForm);
- const findRepliesWidget = () => wrapper.find(ToggleRepliesWidget);
+ const findDesignNotes = () => wrapper.findAllComponents(DesignNote);
+ const findReplyPlaceholder = () => wrapper.findComponent(ReplyPlaceholder);
+ const findReplyForm = () => wrapper.findComponent(DesignReplyForm);
+ const findRepliesWidget = () => wrapper.findComponent(ToggleRepliesWidget);
const findResolveButton = () => wrapper.find('[data-testid="resolve-button"]');
const findResolvedMessage = () => wrapper.find('[data-testid="resolved-message"]');
- const findResolveLoadingIcon = () => wrapper.find(GlLoadingIcon);
+ const findResolveLoadingIcon = () => wrapper.findComponent(GlLoadingIcon);
const findResolveCheckbox = () => wrapper.find('[data-testid="resolve-checkbox"]');
const findApolloMutation = () => wrapper.findComponent(ApolloMutation);
@@ -307,7 +307,7 @@ describe('Design discussions component', () => {
expect(
wrapper
- .findAll(DesignNote)
+ .findAllComponents(DesignNote)
.wrappers.every((designNote) => designNote.classes('gl-bg-blue-50')),
).toBe(true);
},
@@ -351,7 +351,7 @@ describe('Design discussions component', () => {
createComponent();
findReplyPlaceholder().vm.$emit('focus');
- expect(wrapper.emitted('open-form')).toBeTruthy();
+ expect(wrapper.emitted('open-form')).toHaveLength(1);
});
describe('when user is not logged in', () => {
diff --git a/spec/frontend/design_management/components/design_notes/design_note_spec.js b/spec/frontend/design_management/components/design_notes/design_note_spec.js
index 1f84fde9f7f..28833b4af5c 100644
--- a/spec/frontend/design_management/components/design_notes/design_note_spec.js
+++ b/spec/frontend/design_management/components/design_notes/design_note_spec.js
@@ -100,7 +100,7 @@ describe('Design note component', () => {
note,
});
- expect(wrapper.find(TimeAgoTooltip).exists()).toBe(true);
+ expect(wrapper.findComponent(TimeAgoTooltip).exists()).toBe(true);
});
it('should not render edit icon when user does not have a permission', () => {
diff --git a/spec/frontend/design_management/components/design_notes/design_reply_form_spec.js b/spec/frontend/design_management/components/design_notes/design_reply_form_spec.js
index d2d1fe6b2d8..f7ce742b933 100644
--- a/spec/frontend/design_management/components/design_notes/design_reply_form_spec.js
+++ b/spec/frontend/design_management/components/design_notes/design_reply_form_spec.js
@@ -15,9 +15,9 @@ describe('Design reply form component', () => {
let wrapper;
const findTextarea = () => wrapper.find('textarea');
- const findSubmitButton = () => wrapper.find({ ref: 'submitButton' });
- const findCancelButton = () => wrapper.find({ ref: 'cancelButton' });
- const findModal = () => wrapper.find({ ref: 'cancelCommentModal' });
+ const findSubmitButton = () => wrapper.findComponent({ ref: 'submitButton' });
+ const findCancelButton = () => wrapper.findComponent({ ref: 'cancelButton' });
+ const findModal = () => wrapper.findComponent({ ref: 'cancelCommentModal' });
function createComponent(props = {}, mountOptions = {}) {
wrapper = mount(DesignReplyForm, {
@@ -42,6 +42,18 @@ describe('Design reply form component', () => {
expect(findTextarea().element).toEqual(document.activeElement);
});
+ it('renders "Attach a file or image" button in markdown toolbar', () => {
+ createComponent();
+
+ expect(wrapper.find('[data-testid="button-attach-file"]').exists()).toBe(true);
+ });
+
+ it('renders file upload progress container', () => {
+ createComponent();
+
+ expect(wrapper.find('.comment-toolbar .uploading-container').exists()).toBe(true);
+ });
+
it('renders button text as "Comment" when creating a comment', () => {
createComponent();
diff --git a/spec/frontend/design_management/components/design_notes/toggle_replies_widget_spec.js b/spec/frontend/design_management/components/design_notes/toggle_replies_widget_spec.js
index f87228663b6..41129e2b58d 100644
--- a/spec/frontend/design_management/components/design_notes/toggle_replies_widget_spec.js
+++ b/spec/frontend/design_management/components/design_notes/toggle_replies_widget_spec.js
@@ -8,10 +8,10 @@ describe('Toggle replies widget component', () => {
let wrapper;
const findToggleWrapper = () => wrapper.find('[data-testid="toggle-comments-wrapper"]');
- const findIcon = () => wrapper.find(GlIcon);
- const findButton = () => wrapper.find(GlButton);
- const findAuthorLink = () => wrapper.find(GlLink);
- const findTimeAgo = () => wrapper.find(TimeAgoTooltip);
+ const findIcon = () => wrapper.findComponent(GlIcon);
+ const findButton = () => wrapper.findComponent(GlButton);
+ const findAuthorLink = () => wrapper.findComponent(GlLink);
+ const findTimeAgo = () => wrapper.findComponent(TimeAgoTooltip);
function createComponent(props = {}) {
wrapper = shallowMount(ToggleRepliesWidget, {
diff --git a/spec/frontend/design_management/components/design_scaler_spec.js b/spec/frontend/design_management/components/design_scaler_spec.js
index a04e2ebda5b..e1a66cea329 100644
--- a/spec/frontend/design_management/components/design_scaler_spec.js
+++ b/spec/frontend/design_management/components/design_scaler_spec.js
@@ -6,7 +6,7 @@ import DesignScaler from '~/design_management/components/design_scaler.vue';
describe('Design management design scaler component', () => {
let wrapper;
- const getButtons = () => wrapper.findAll(GlButton);
+ const getButtons = () => wrapper.findAllComponents(GlButton);
const getDecreaseScaleButton = () => getButtons().at(0);
const getResetScaleButton = () => getButtons().at(1);
const getIncreaseScaleButton = () => getButtons().at(2);
diff --git a/spec/frontend/design_management/components/design_sidebar_spec.js b/spec/frontend/design_management/components/design_sidebar_spec.js
index f13796138bd..af995f75ddc 100644
--- a/spec/frontend/design_management/components/design_sidebar_spec.js
+++ b/spec/frontend/design_management/components/design_sidebar_spec.js
@@ -32,12 +32,12 @@ describe('Design management design sidebar component', () => {
const originalGon = window.gon;
let wrapper;
- const findDiscussions = () => wrapper.findAll(DesignDiscussion);
+ const findDiscussions = () => wrapper.findAllComponents(DesignDiscussion);
const findFirstDiscussion = () => findDiscussions().at(0);
const findUnresolvedDiscussions = () => wrapper.findAll('[data-testid="unresolved-discussion"]');
const findResolvedDiscussions = () => wrapper.findAll('[data-testid="resolved-discussion"]');
- const findParticipants = () => wrapper.find(Participants);
- const findResolvedCommentsToggle = () => wrapper.find(GlAccordionItem);
+ const findParticipants = () => wrapper.findComponent(Participants);
+ const findResolvedCommentsToggle = () => wrapper.findComponent(GlAccordionItem);
const findNewDiscussionDisclaimer = () =>
wrapper.find('[data-testid="new-discussion-disclaimer"]');
@@ -87,7 +87,7 @@ describe('Design management design sidebar component', () => {
it('renders To-Do button', () => {
createComponent();
- expect(wrapper.find(DesignTodoButton).exists()).toBe(true);
+ expect(wrapper.findComponent(DesignTodoButton).exists()).toBe(true);
});
describe('when has no discussions', () => {
diff --git a/spec/frontend/design_management/components/design_todo_button_spec.js b/spec/frontend/design_management/components/design_todo_button_spec.js
index 73661c9fcb0..b3afcefe1ed 100644
--- a/spec/frontend/design_management/components/design_todo_button_spec.js
+++ b/spec/frontend/design_management/components/design_todo_button_spec.js
@@ -57,7 +57,7 @@ describe('Design management design todo button', () => {
});
it('renders TodoButton component', () => {
- expect(wrapper.find(TodoButton).exists()).toBe(true);
+ expect(wrapper.findComponent(TodoButton).exists()).toBe(true);
});
describe('when design has a pending todo', () => {
diff --git a/spec/frontend/design_management/components/image_spec.js b/spec/frontend/design_management/components/image_spec.js
index 65ee0ae6238..8163cb0d87a 100644
--- a/spec/frontend/design_management/components/image_spec.js
+++ b/spec/frontend/design_management/components/image_spec.js
@@ -71,7 +71,7 @@ describe('Design management large image component', () => {
image.trigger('error');
await nextTick();
expect(image.isVisible()).toBe(false);
- expect(wrapper.find(GlIcon).element).toMatchSnapshot();
+ expect(wrapper.findComponent(GlIcon).element).toMatchSnapshot();
});
describe('zoom', () => {
diff --git a/spec/frontend/design_management/components/list/item_spec.js b/spec/frontend/design_management/components/list/item_spec.js
index e00dda2015e..66d3f883960 100644
--- a/spec/frontend/design_management/components/list/item_spec.js
+++ b/spec/frontend/design_management/components/list/item_spec.js
@@ -23,8 +23,8 @@ describe('Design management list item component', () => {
const findDesignEvent = () => wrapper.findByTestId('design-event');
const findImgFilename = (id = imgId) => wrapper.findByTestId(`design-img-filename-${id}`);
- const findEventIcon = () => findDesignEvent().find(GlIcon);
- const findLoadingIcon = () => wrapper.find(GlLoadingIcon);
+ const findEventIcon = () => findDesignEvent().findComponent(GlIcon);
+ const findLoadingIcon = () => wrapper.findComponent(GlLoadingIcon);
function createComponent({
notesCount = 0,
@@ -74,7 +74,7 @@ describe('Design management list item component', () => {
beforeEach(async () => {
createComponent();
image = wrapper.find('img');
- glIntersectionObserver = wrapper.find(GlIntersectionObserver);
+ glIntersectionObserver = wrapper.findComponent(GlIntersectionObserver);
glIntersectionObserver.vm.$emit('appear');
await nextTick();
@@ -86,7 +86,7 @@ describe('Design management list item component', () => {
describe('before image is loaded', () => {
it('renders loading spinner', () => {
- expect(wrapper.find(GlLoadingIcon).exists()).toBe(true);
+ expect(wrapper.findComponent(GlLoadingIcon).exists()).toBe(true);
});
});
@@ -105,7 +105,7 @@ describe('Design management list item component', () => {
image.trigger('error');
await nextTick();
expect(image.isVisible()).toBe(false);
- expect(wrapper.find(GlIcon).element).toMatchSnapshot();
+ expect(wrapper.findComponent(GlIcon).element).toMatchSnapshot();
});
describe('when imageV432x230 and image provided', () => {
diff --git a/spec/frontend/design_management/components/toolbar/index_spec.js b/spec/frontend/design_management/components/toolbar/index_spec.js
index 412f3de911e..b6137ba2eee 100644
--- a/spec/frontend/design_management/components/toolbar/index_spec.js
+++ b/spec/frontend/design_management/components/toolbar/index_spec.js
@@ -85,35 +85,35 @@ describe('Design management toolbar component', () => {
createComponent();
await nextTick();
- expect(wrapper.find(DeleteButton).exists()).toBe(true);
+ expect(wrapper.findComponent(DeleteButton).exists()).toBe(true);
});
it('does not render delete button on non-latest version', async () => {
createComponent(false, true, { isLatestVersion: false });
await nextTick();
- expect(wrapper.find(DeleteButton).exists()).toBe(false);
+ expect(wrapper.findComponent(DeleteButton).exists()).toBe(false);
});
it('does not render delete button when user is not logged in', async () => {
createComponent(false, false);
await nextTick();
- expect(wrapper.find(DeleteButton).exists()).toBe(false);
+ expect(wrapper.findComponent(DeleteButton).exists()).toBe(false);
});
it('emits `delete` event on deleteButton `delete-selected-designs` event', async () => {
createComponent();
await nextTick();
- wrapper.find(DeleteButton).vm.$emit('delete-selected-designs');
+ wrapper.findComponent(DeleteButton).vm.$emit('delete-selected-designs');
expect(wrapper.emitted().delete).toBeTruthy();
});
it('renders download button with correct link', () => {
createComponent();
- expect(wrapper.find(GlButton).attributes('href')).toBe(
+ expect(wrapper.findComponent(GlButton).attributes('href')).toBe(
'/-/designs/306/7f747adcd4693afadbe968d7ba7d983349b9012d',
);
});
diff --git a/spec/frontend/design_management/components/upload/button_spec.js b/spec/frontend/design_management/components/upload/button_spec.js
index d123db43ce6..59821218ab8 100644
--- a/spec/frontend/design_management/components/upload/button_spec.js
+++ b/spec/frontend/design_management/components/upload/button_spec.js
@@ -34,7 +34,7 @@ describe('Design management upload button component', () => {
it('Button `loading` prop is `true`', () => {
createComponent({ isSaving: true });
- const button = wrapper.find(GlButton);
+ const button = wrapper.findComponent(GlButton);
expect(button.exists()).toBe(true);
expect(button.props('loading')).toBe(true);
});
diff --git a/spec/frontend/design_management/components/upload/design_version_dropdown_spec.js b/spec/frontend/design_management/components/upload/design_version_dropdown_spec.js
index ec5db04bb80..7c26ab9739b 100644
--- a/spec/frontend/design_management/components/upload/design_version_dropdown_spec.js
+++ b/spec/frontend/design_management/components/upload/design_version_dropdown_spec.js
@@ -46,7 +46,7 @@ describe('Design management design version dropdown component', () => {
wrapper.destroy();
});
- const findVersionLink = (index) => wrapper.findAll(GlDropdownItem).at(index);
+ const findVersionLink = (index) => wrapper.findAllComponents(GlDropdownItem).at(index);
it('renders design version dropdown button', async () => {
createComponent();
@@ -76,35 +76,35 @@ describe('Design management design version dropdown component', () => {
createComponent();
await nextTick();
- expect(wrapper.find(GlDropdown).attributes('text')).toBe('Showing latest version');
+ expect(wrapper.findComponent(GlDropdown).attributes('text')).toBe('Showing latest version');
});
it('displays latest version text when only 1 version is present', async () => {
createComponent({ maxVersions: 1 });
await nextTick();
- expect(wrapper.find(GlDropdown).attributes('text')).toBe('Showing latest version');
+ expect(wrapper.findComponent(GlDropdown).attributes('text')).toBe('Showing latest version');
});
it('displays version text when the current version is not the latest', async () => {
createComponent({ $route: designRouteFactory(PREVIOUS_VERSION_ID) });
await nextTick();
- expect(wrapper.find(GlDropdown).attributes('text')).toBe(`Showing version #1`);
+ expect(wrapper.findComponent(GlDropdown).attributes('text')).toBe(`Showing version #1`);
});
it('displays latest version text when the current version is the latest', async () => {
createComponent({ $route: designRouteFactory(LATEST_VERSION_ID) });
await nextTick();
- expect(wrapper.find(GlDropdown).attributes('text')).toBe('Showing latest version');
+ expect(wrapper.findComponent(GlDropdown).attributes('text')).toBe('Showing latest version');
});
it('should have the same length as apollo query', async () => {
createComponent();
await nextTick();
- expect(wrapper.findAll(GlDropdownItem)).toHaveLength(wrapper.vm.allVersions.length);
+ expect(wrapper.findAllComponents(GlDropdownItem)).toHaveLength(wrapper.vm.allVersions.length);
});
it('should render TimeAgo', async () => {
diff --git a/spec/frontend/design_management/pages/design/index_spec.js b/spec/frontend/design_management/pages/design/index_spec.js
index 17a299c5de1..774e37a8b21 100644
--- a/spec/frontend/design_management/pages/design/index_spec.js
+++ b/spec/frontend/design_management/pages/design/index_spec.js
@@ -85,9 +85,9 @@ describe('Design management design index page', () => {
let wrapper;
let router;
- const findDiscussionForm = () => wrapper.find(DesignReplyForm);
- const findSidebar = () => wrapper.find(DesignSidebar);
- const findDesignPresentation = () => wrapper.find(DesignPresentation);
+ const findDiscussionForm = () => wrapper.findComponent(DesignReplyForm);
+ const findSidebar = () => wrapper.findComponent(DesignSidebar);
+ const findDesignPresentation = () => wrapper.findComponent(DesignPresentation);
function createComponent(
{ loading = false } = {},
@@ -181,15 +181,15 @@ describe('Design management design index page', () => {
it('sets loading state', () => {
createComponent({ loading: true });
- expect(wrapper.find(DesignPresentation).props('isLoading')).toBe(true);
- expect(wrapper.find(DesignSidebar).props('isLoading')).toBe(true);
+ expect(wrapper.findComponent(DesignPresentation).props('isLoading')).toBe(true);
+ expect(wrapper.findComponent(DesignSidebar).props('isLoading')).toBe(true);
});
it('renders design index', () => {
createComponent({ loading: false }, { data: { design } });
expect(wrapper.element).toMatchSnapshot();
- expect(wrapper.find(GlAlert).exists()).toBe(false);
+ expect(wrapper.findComponent(GlAlert).exists()).toBe(false);
});
it('passes correct props to sidebar component', () => {
diff --git a/spec/frontend/design_management/pages/index_spec.js b/spec/frontend/design_management/pages/index_spec.js
index 21be7bd148b..f90feaadfb0 100644
--- a/spec/frontend/design_management/pages/index_spec.js
+++ b/spec/frontend/design_management/pages/index_spec.js
@@ -111,8 +111,8 @@ describe('Design management index page', () => {
const findDropzoneWrapper = () => wrapper.findByTestId('design-dropzone-wrapper');
const findFirstDropzoneWithDesign = () => wrapper.findAllComponents(DesignDropzone).at(1);
const findDesignsWrapper = () => wrapper.findByTestId('designs-root');
- const findDesigns = () => wrapper.findAll(Design);
- const draggableAttributes = () => wrapper.find(VueDraggable).vm.$attrs;
+ const findDesigns = () => wrapper.findAllComponents(Design);
+ const draggableAttributes = () => wrapper.findComponent(VueDraggable).vm.$attrs;
const findDesignUploadButton = () => wrapper.findByTestId('design-upload-button');
const findDesignToolbarWrapper = () => wrapper.findByTestId('design-toolbar-wrapper');
const findDesignUpdateAlert = () => wrapper.findByTestId('design-update-alert');
@@ -120,8 +120,8 @@ describe('Design management index page', () => {
async function moveDesigns(localWrapper) {
await waitForPromises();
- localWrapper.find(VueDraggable).vm.$emit('input', reorderedDesigns);
- localWrapper.find(VueDraggable).vm.$emit('change', {
+ localWrapper.findComponent(VueDraggable).vm.$emit('input', reorderedDesigns);
+ localWrapper.findComponent(VueDraggable).vm.$emit('change', {
moved: {
newIndex: 0,
element: designToMove,
@@ -369,7 +369,7 @@ describe('Design management index page', () => {
findDropzone().vm.$emit('change', [{ name: 'test' }]);
expect(mutate).toHaveBeenCalledWith(mutationVariables);
expect(wrapper.vm.filesToBeSaved).toEqual([{ name: 'test' }]);
- expect(wrapper.vm.isSaving).toBeTruthy();
+ expect(wrapper.vm.isSaving).toBe(true);
expect(dropzoneClasses()).toContain('design-list-item');
expect(dropzoneClasses()).toContain('design-list-item-new');
});
@@ -399,7 +399,7 @@ describe('Design management index page', () => {
await nextTick();
expect(wrapper.vm.filesToBeSaved).toEqual([]);
- expect(wrapper.vm.isSaving).toBeFalsy();
+ expect(wrapper.vm.isSaving).toBe(false);
expect(wrapper.vm.isLatestVersion).toBe(true);
});
@@ -412,7 +412,7 @@ describe('Design management index page', () => {
wrapper.vm.onUploadDesignError();
await nextTick();
expect(wrapper.vm.filesToBeSaved).toEqual([]);
- expect(wrapper.vm.isSaving).toBeFalsy();
+ expect(wrapper.vm.isSaving).toBe(false);
expect(findDesignUpdateAlert().exists()).toBe(true);
expect(findDesignUpdateAlert().text()).toBe(UPLOAD_DESIGN_ERROR);
});
diff --git a/spec/frontend/design_management/router_spec.js b/spec/frontend/design_management/router_spec.js
index b9c62334223..b9edde559c8 100644
--- a/spec/frontend/design_management/router_spec.js
+++ b/spec/frontend/design_management/router_spec.js
@@ -44,7 +44,7 @@ describe('Design management router', () => {
it('pushes home component', () => {
const wrapper = factory(routeArg);
- expect(wrapper.find(Designs).exists()).toBe(true);
+ expect(wrapper.findComponent(Designs).exists()).toBe(true);
});
});
@@ -55,7 +55,7 @@ describe('Design management router', () => {
const wrapper = factory(routeArg);
return nextTick().then(() => {
- const detail = wrapper.find(DesignDetail);
+ const detail = wrapper.findComponent(DesignDetail);
expect(detail.exists()).toBe(true);
expect(detail.props('id')).toEqual('1');
});
diff --git a/spec/frontend/diffs/components/diff_file_header_spec.js b/spec/frontend/diffs/components/diff_file_header_spec.js
index d90afeb6b82..92b8b2d4aa3 100644
--- a/spec/frontend/diffs/components/diff_file_header_spec.js
+++ b/spec/frontend/diffs/components/diff_file_header_spec.js
@@ -263,7 +263,7 @@ describe('DiffFileHeader component', () => {
},
},
});
- expect(findModeChangedLine().exists()).toBeFalsy();
+ expect(findModeChangedLine().exists()).toBe(false);
},
);
diff --git a/spec/frontend/diffs/components/diff_row_spec.js b/spec/frontend/diffs/components/diff_row_spec.js
index be81508213b..a74013dc2d4 100644
--- a/spec/frontend/diffs/components/diff_row_spec.js
+++ b/spec/frontend/diffs/components/diff_row_spec.js
@@ -239,7 +239,7 @@ describe('DiffRow', () => {
const coverage = wrapper.find('.line-coverage.right-side');
expect(coverage.attributes('title')).toContain('Test coverage: 5 hits');
- expect(coverage.classes('coverage')).toBeTruthy();
+ expect(coverage.classes('coverage')).toBe(true);
});
it('for lines without coverage', () => {
@@ -248,7 +248,7 @@ describe('DiffRow', () => {
const coverage = wrapper.find('.line-coverage.right-side');
expect(coverage.attributes('title')).toContain('No test coverage');
- expect(coverage.classes('no-coverage')).toBeTruthy();
+ expect(coverage.classes('no-coverage')).toBe(true);
});
it('for unknown lines', () => {
@@ -256,9 +256,9 @@ describe('DiffRow', () => {
wrapper = createWrapper({ props, state: { coverageFiles } });
const coverage = wrapper.find('.line-coverage.right-side');
- expect(coverage.attributes('title')).toBeFalsy();
- expect(coverage.classes('coverage')).toBeFalsy();
- expect(coverage.classes('no-coverage')).toBeFalsy();
+ expect(coverage.attributes('title')).toBeUndefined();
+ expect(coverage.classes('coverage')).toBe(false);
+ expect(coverage.classes('no-coverage')).toBe(false);
});
});
diff --git a/spec/frontend/diffs/store/utils_spec.js b/spec/frontend/diffs/store/utils_spec.js
index 8852c6c62c5..3f870a98396 100644
--- a/spec/frontend/diffs/store/utils_spec.js
+++ b/spec/frontend/diffs/store/utils_spec.js
@@ -424,8 +424,8 @@ describe('DiffsStoreUtils', () => {
expect(firstChar).not.toBe('+');
expect(firstChar).not.toBe('-');
- expect(preparedDiff.diff_files[0].renderIt).toBeTruthy();
- expect(preparedDiff.diff_files[0].collapsed).toBeFalsy();
+ expect(preparedDiff.diff_files[0].renderIt).toBe(true);
+ expect(preparedDiff.diff_files[0].collapsed).toBe(false);
});
it('guarantees an empty array for both diff styles', () => {
@@ -506,8 +506,8 @@ describe('DiffsStoreUtils', () => {
});
it('sets the renderIt and collapsed attribute on files', () => {
- expect(preparedDiffFiles[0].renderIt).toBeTruthy();
- expect(preparedDiffFiles[0].collapsed).toBeFalsy();
+ expect(preparedDiffFiles[0].renderIt).toBe(true);
+ expect(preparedDiffFiles[0].collapsed).toBeUndefined();
});
it('guarantees an empty array of lines for both diff styles', () => {
diff --git a/spec/frontend/dropzone_input_spec.js b/spec/frontend/dropzone_input_spec.js
index a633de9ef56..0fe70bac6b7 100644
--- a/spec/frontend/dropzone_input_spec.js
+++ b/spec/frontend/dropzone_input_spec.js
@@ -29,7 +29,9 @@ describe('dropzone_input', () => {
it('returns valid dropzone when successfully initialize', () => {
const dropzone = dropzoneInput($(TEMPLATE));
- expect(dropzone.version).toBeTruthy();
+ expect(dropzone).toMatchObject({
+ version: expect.any(String),
+ });
});
describe('handlePaste', () => {
diff --git a/spec/frontend/editor/schema/ci/ci_schema_spec.js b/spec/frontend/editor/schema/ci/ci_schema_spec.js
index c59806a5d60..c9010fbec0c 100644
--- a/spec/frontend/editor/schema/ci/ci_schema_spec.js
+++ b/spec/frontend/editor/schema/ci/ci_schema_spec.js
@@ -2,7 +2,7 @@ import Ajv from 'ajv';
import AjvFormats from 'ajv-formats';
import CiSchema from '~/editor/schema/ci.json';
-// JSON POSITIVE TESTS
+// JSON POSITIVE TESTS (LEGACY)
import AllowFailureJson from './json_tests/positive_tests/allow_failure.json';
import EnvironmentJson from './json_tests/positive_tests/environment.json';
import GitlabCiDependenciesJson from './json_tests/positive_tests/gitlab-ci-dependencies.json';
@@ -14,7 +14,7 @@ import TerraformReportJson from './json_tests/positive_tests/terraform_report.js
import VariablesMixStringAndUserInputJson from './json_tests/positive_tests/variables_mix_string_and_user_input.json';
import VariablesJson from './json_tests/positive_tests/variables.json';
-// JSON NEGATIVE TESTS
+// JSON NEGATIVE TESTS (LEGACY)
import DefaultNoAdditionalPropertiesJson from './json_tests/negative_tests/default_no_additional_properties.json';
import InheritDefaultNoAdditionalPropertiesJson from './json_tests/negative_tests/inherit_default_no_additional_properties.json';
import JobVariablesMustNotContainObjectsJson from './json_tests/negative_tests/job_variables_must_not_contain_objects.json';
@@ -24,14 +24,17 @@ import ReleaseAssetsLinksMissingJson from './json_tests/negative_tests/release_a
import RetryUnknownWhenJson from './json_tests/negative_tests/retry_unknown_when.json';
// YAML POSITIVE TEST
+import ArtifactsYaml from './yaml_tests/positive_tests/artifacts.yml';
import CacheYaml from './yaml_tests/positive_tests/cache.yml';
import FilterYaml from './yaml_tests/positive_tests/filter.yml';
import IncludeYaml from './yaml_tests/positive_tests/include.yml';
import RulesYaml from './yaml_tests/positive_tests/rules.yml';
// YAML NEGATIVE TEST
+import ArtifactsNegativeYaml from './yaml_tests/negative_tests/artifacts.yml';
import CacheNegativeYaml from './yaml_tests/negative_tests/cache.yml';
import IncludeNegativeYaml from './yaml_tests/negative_tests/include.yml';
+import RulesNegativeYaml from './yaml_tests/negative_tests/rules.yml';
const ajv = new Ajv({
strictTypes: false,
@@ -59,6 +62,7 @@ describe('positive tests', () => {
VariablesJson,
// YAML
+ ArtifactsYaml,
CacheYaml,
FilterYaml,
IncludeYaml,
@@ -82,8 +86,10 @@ describe('negative tests', () => {
RetryUnknownWhenJson,
// YAML
+ ArtifactsNegativeYaml,
CacheNegativeYaml,
IncludeNegativeYaml,
+ RulesNegativeYaml,
}),
)('schema validates %s', (_, input) => {
expect(input).not.toValidateJsonSchema(schema);
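The YAML fixtures added below are consumed by the toValidateJsonSchema matcher used above. Conceptually the matcher compiles ci.json with Ajv and validates each parsed fixture against it; the following is only a minimal sketch under that assumption (someFixture stands in for a hypothetical parsed YAML object, not a real export):

// Sketch: validate one parsed CI fixture against the schema, mirroring the
// Ajv setup shown in ci_schema_spec.js above.
import Ajv from 'ajv';
import AjvFormats from 'ajv-formats';
import CiSchema from '~/editor/schema/ci.json';

const ajv = new Ajv({ strictTypes: false });
AjvFormats(ajv);
const validate = ajv.compile(CiSchema);

// Positive fixtures are expected to validate; negative fixtures such as
// yaml_tests/negative_tests/artifacts.yml are expected to fail, with
// validate.errors listing the violated schema rules.
const isValid = validate(someFixture); // someFixture: hypothetical parsed fixture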
diff --git a/spec/frontend/editor/schema/ci/yaml_tests/negative_tests/artifacts.yml b/spec/frontend/editor/schema/ci/yaml_tests/negative_tests/artifacts.yml
new file mode 100644
index 00000000000..f5670376efc
--- /dev/null
+++ b/spec/frontend/editor/schema/ci/yaml_tests/negative_tests/artifacts.yml
@@ -0,0 +1,18 @@
+# invalid artifact:reports:cyclonedx
+
+cyclonedx no paths:
+ artifacts:
+ reports:
+ cyclonedx:
+
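+# cyclonedx placed directly under artifacts instead of artifacts:reports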
+cyclonedx not a report:
+ artifacts:
+ cyclonedx: foo
+
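+# cyclonedx value must be a string or an array of strings, not a mapping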
+cyclonedx not an array or string:
+ artifacts:
+ reports:
+ cyclonedx:
+ paths:
+ - foo
+ - bar
diff --git a/spec/frontend/editor/schema/ci/yaml_tests/negative_tests/cache.yml b/spec/frontend/editor/schema/ci/yaml_tests/negative_tests/cache.yml
index ee533f54d3b..04020c06753 100644
--- a/spec/frontend/editor/schema/ci/yaml_tests/negative_tests/cache.yml
+++ b/spec/frontend/editor/schema/ci/yaml_tests/negative_tests/cache.yml
@@ -1,15 +1,13 @@
-# Covers https://gitlab.com/gitlab-org/gitlab/-/merge_requests/70779
stages:
- prepare
-# invalid cache:when value
-job1:
+# invalid cache:when values
+when no integer:
stage: prepare
cache:
when: 0
-# invalid cache:when value
-job2:
+when must be a reserved word:
stage: prepare
cache:
when: 'never'
diff --git a/spec/frontend/editor/schema/ci/yaml_tests/negative_tests/include.yml b/spec/frontend/editor/schema/ci/yaml_tests/negative_tests/include.yml
index 287150a765f..1e16bb55405 100644
--- a/spec/frontend/editor/schema/ci/yaml_tests/negative_tests/include.yml
+++ b/spec/frontend/editor/schema/ci/yaml_tests/negative_tests/include.yml
@@ -1,16 +1,14 @@
-# Covers https://gitlab.com/gitlab-org/gitlab/-/merge_requests/70779
stages:
- prepare
-# missing file property
-childPipeline:
+# invalid trigger:include
+trigger missing file property:
stage: prepare
trigger:
include:
- project: 'my-group/my-pipeline-library'
-# missing project property
-childPipeline2:
+trigger missing project property:
stage: prepare
trigger:
include:
diff --git a/spec/frontend/editor/schema/ci/yaml_tests/negative_tests/rules.yml b/spec/frontend/editor/schema/ci/yaml_tests/negative_tests/rules.yml
new file mode 100644
index 00000000000..d74a681b23b
--- /dev/null
+++ b/spec/frontend/editor/schema/ci/yaml_tests/negative_tests/rules.yml
@@ -0,0 +1,14 @@
+# invalid rules:changes
+unnecessary ref declaration:
+ script: exit 0
+ rules:
+ - changes:
+ paths:
+ - README.md
+ compare_to: { ref: 'main' }
+
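+# paths must be an array of strings, not a mapping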
+wrong path declaration:
+ script: exit 0
+ rules:
+ - changes:
+ paths: { file: 'DOCKER' }
diff --git a/spec/frontend/editor/schema/ci/yaml_tests/positive_tests/artifacts.yml b/spec/frontend/editor/schema/ci/yaml_tests/positive_tests/artifacts.yml
new file mode 100644
index 00000000000..20c1fc2c50f
--- /dev/null
+++ b/spec/frontend/editor/schema/ci/yaml_tests/positive_tests/artifacts.yml
@@ -0,0 +1,25 @@
+# valid artifact:reports:cyclonedx
+
+cyclonedx string path:
+ artifacts:
+ reports:
+ cyclonedx: foo
+
+cyclonedx glob path:
+ artifacts:
+ reports:
+ cyclonedx: "*.foo"
+
+cyclonedx list of string paths:
+ artifacts:
+ reports:
+ cyclonedx:
+ - foo
+ - ./bar/baz
+
+cyclonedx mixed list of string paths and globs:
+ artifacts:
+ reports:
+ cyclonedx:
+ - ./foo
+ - "bar/*.baz"
diff --git a/spec/frontend/editor/schema/ci/yaml_tests/positive_tests/cache.yml b/spec/frontend/editor/schema/ci/yaml_tests/positive_tests/cache.yml
index 436c7d72699..d83e14fdc6a 100644
--- a/spec/frontend/editor/schema/ci/yaml_tests/positive_tests/cache.yml
+++ b/spec/frontend/editor/schema/ci/yaml_tests/positive_tests/cache.yml
@@ -1,8 +1,7 @@
-# Covers https://gitlab.com/gitlab-org/gitlab/-/merge_requests/70779
stages:
- prepare
-# test for cache:when values
+# valid cache:when values
job1:
stage: prepare
script:
diff --git a/spec/frontend/editor/schema/ci/yaml_tests/positive_tests/filter.yml b/spec/frontend/editor/schema/ci/yaml_tests/positive_tests/filter.yml
index 2b29c24fa3c..f82ea71dcf3 100644
--- a/spec/frontend/editor/schema/ci/yaml_tests/positive_tests/filter.yml
+++ b/spec/frontend/editor/schema/ci/yaml_tests/positive_tests/filter.yml
@@ -1,5 +1,5 @@
-# Covers https://gitlab.com/gitlab-org/gitlab/-/merge_requests/79335
-deploy-template:
+# valid only/except values
+only and except as array of strings:
script:
- echo "hello world"
only:
@@ -7,12 +7,10 @@ deploy-template:
except:
- bar
-# null value allowed
-deploy-without-only:
+only as null value:
extends: deploy-template
only:
-# null value allowed
-deploy-without-except:
+except as null value:
extends: deploy-template
except:
diff --git a/spec/frontend/editor/schema/ci/yaml_tests/positive_tests/include.yml b/spec/frontend/editor/schema/ci/yaml_tests/positive_tests/include.yml
index 3497be28058..c00ab0d464a 100644
--- a/spec/frontend/editor/schema/ci/yaml_tests/positive_tests/include.yml
+++ b/spec/frontend/editor/schema/ci/yaml_tests/positive_tests/include.yml
@@ -1,17 +1,15 @@
-# Covers https://gitlab.com/gitlab-org/gitlab/-/merge_requests/70779
+stages:
+ - prepare
-# test for include:rules
+# valid include:rules
include:
- local: builds.yml
rules:
- if: '$INCLUDE_BUILDS == "true"'
when: always
-stages:
- - prepare
-
-# test for trigger:include
-childPipeline:
+# valid trigger:include
+trigger:include accepts project and file properties:
stage: prepare
script:
- echo 'creating pipeline...'
@@ -20,8 +18,7 @@ childPipeline:
- project: 'my-group/my-pipeline-library'
file: '.gitlab-ci.yml'
-# accepts optional ref property
-childPipeline2:
+trigger:include accepts optional ref property:
stage: prepare
script:
- echo 'creating pipeline...'
diff --git a/spec/frontend/editor/schema/ci/yaml_tests/positive_tests/rules.yml b/spec/frontend/editor/schema/ci/yaml_tests/positive_tests/rules.yml
index 27a199cff13..37cae6b4264 100644
--- a/spec/frontend/editor/schema/ci/yaml_tests/positive_tests/rules.yml
+++ b/spec/frontend/editor/schema/ci/yaml_tests/positive_tests/rules.yml
@@ -1,13 +1,28 @@
-# Covers https://gitlab.com/gitlab-org/gitlab/-/merge_requests/74164
+# valid rules:changes
+rules:changes with paths and compare_to properties:
+ script: exit 0
+ rules:
+ - changes:
+ paths:
+ - README.md
+ compare_to: main
+
+rules:changes as array of strings:
+ script: exit 0
+ rules:
+ - changes:
+ - README.md
-# test for workflow:rules:changes and workflow:rules:exists
+# valid workflow:rules:exists
+# valid workflow:rules:changes:paths
workflow:
rules:
+ - changes:
+ paths:
+ - README.md
- if: '$CI_PIPELINE_SOURCE == "schedule"'
exists:
- Dockerfile
- changes:
- - Dockerfile
variables:
IS_A_FEATURE: 'true'
when: always
diff --git a/spec/frontend/editor/source_editor_instance_spec.js b/spec/frontend/editor/source_editor_instance_spec.js
index 99c4ff4f3fa..1223fee320e 100644
--- a/spec/frontend/editor/source_editor_instance_spec.js
+++ b/spec/frontend/editor/source_editor_instance_spec.js
@@ -423,7 +423,7 @@ describe('Source Editor Instance', () => {
'changes language of an attached model to "$expectedLanguage" when filepath is "$path"',
({ path, expectedLanguage }) => {
seInstance.updateModelLanguage(path);
- expect(instanceModel.getLanguageIdentifier().language).toBe(expectedLanguage);
+ expect(instanceModel.getLanguageId()).toBe(expectedLanguage);
},
);
});
diff --git a/spec/frontend/editor/source_editor_spec.js b/spec/frontend/editor/source_editor_spec.js
index 74aae7b899b..6a8e7b296aa 100644
--- a/spec/frontend/editor/source_editor_spec.js
+++ b/spec/frontend/editor/source_editor_spec.js
@@ -267,7 +267,6 @@ describe('Base editor', () => {
let editorEl2;
let inst1;
let inst2;
- const readOnlyIndex = '78'; // readOnly option has the internal index of 78 in the editor's options
beforeEach(() => {
setHTMLFixture('<div id="editor1"></div><div id="editor2"></div>');
@@ -331,10 +330,10 @@ describe('Base editor', () => {
});
inst1 = editor.createInstance(inst1Args);
- expect(inst1.getOption(readOnlyIndex)).toBe(true);
+ expect(inst1.getRawOptions().readOnly).toBe(true);
inst2 = editor.createInstance(inst2Args);
- expect(inst2.getOption(readOnlyIndex)).toBe(true);
+ expect(inst2.getRawOptions().readOnly).toBe(true);
});
it('allows overriding editor options on the instance level', () => {
@@ -346,7 +345,7 @@ describe('Base editor', () => {
readOnly: false,
});
- expect(inst1.getOption(readOnlyIndex)).toBe(false);
+ expect(inst1.getRawOptions().readOnly).toBe(false);
});
it('disposes instances and relevant models independently from each other', () => {
diff --git a/spec/frontend/environment.js b/spec/frontend/environment.js
index dc1c1dfbe4a..1c84350bd8e 100644
--- a/spec/frontend/environment.js
+++ b/spec/frontend/environment.js
@@ -70,7 +70,6 @@ class CustomEnvironment extends JSDOMEnvironment {
//
// Monaco-related environment variables
//
- this.global.MonacoEnvironment = { globalAPI: true };
Object.defineProperty(this.global, 'matchMedia', {
writable: true,
value: (query) => ({
diff --git a/spec/frontend/environments/canary_ingress_spec.js b/spec/frontend/environments/canary_ingress_spec.js
index d58f9f9b8a2..340740e6499 100644
--- a/spec/frontend/environments/canary_ingress_spec.js
+++ b/spec/frontend/environments/canary_ingress_spec.js
@@ -10,7 +10,7 @@ describe('/environments/components/canary_ingress.vue', () => {
const setWeightTo = (weightWrapper, x) =>
weightWrapper
- .findAll(GlDropdownItem)
+ .findAllComponents(GlDropdownItem)
.at(x / 5)
.vm.$emit('click');
@@ -59,14 +59,14 @@ describe('/environments/components/canary_ingress.vue', () => {
});
it('lists options from 0 to 100 in increments of 5', () => {
- const options = stableWeightDropdown.findAll(GlDropdownItem);
+ const options = stableWeightDropdown.findAllComponents(GlDropdownItem);
expect(options).toHaveLength(21);
options.wrappers.forEach((w, i) => expect(w.text()).toBe((i * 5).toString()));
});
it('is set to open the change modal', () => {
stableWeightDropdown
- .findAll(GlDropdownItem)
+ .findAllComponents(GlDropdownItem)
.wrappers.forEach((w) =>
expect(getBinding(w.element, 'gl-modal')).toMatchObject({ value: CANARY_UPDATE_MODAL }),
);
@@ -92,13 +92,13 @@ describe('/environments/components/canary_ingress.vue', () => {
it('lists options from 0 to 100 in increments of 5', () => {
canaryWeightDropdown
- .findAll(GlDropdownItem)
+ .findAllComponents(GlDropdownItem)
.wrappers.forEach((w, i) => expect(w.text()).toBe((i * 5).toString()));
});
it('is set to open the change modal', () => {
canaryWeightDropdown
- .findAll(GlDropdownItem)
+ .findAllComponents(GlDropdownItem)
.wrappers.forEach((w) =>
expect(getBinding(w.element, 'gl-modal')).toMatchObject({ value: CANARY_UPDATE_MODAL }),
);
diff --git a/spec/frontend/environments/canary_update_modal_spec.js b/spec/frontend/environments/canary_update_modal_spec.js
index 16792dcda1e..31b1770da59 100644
--- a/spec/frontend/environments/canary_update_modal_spec.js
+++ b/spec/frontend/environments/canary_update_modal_spec.js
@@ -10,7 +10,7 @@ describe('/environments/components/canary_update_modal.vue', () => {
let modal;
let mutate;
- const findAlert = () => wrapper.find(GlAlert);
+ const findAlert = () => wrapper.findComponent(GlAlert);
const createComponent = () => {
mutate = jest.fn().mockResolvedValue();
@@ -27,7 +27,7 @@ describe('/environments/components/canary_update_modal.vue', () => {
$apollo: { mutate },
},
});
- modal = wrapper.find(GlModal);
+ modal = wrapper.findComponent(GlModal);
};
afterEach(() => {
diff --git a/spec/frontend/environments/confirm_rollback_modal_spec.js b/spec/frontend/environments/confirm_rollback_modal_spec.js
index c4763933468..2163814528a 100644
--- a/spec/frontend/environments/confirm_rollback_modal_spec.js
+++ b/spec/frontend/environments/confirm_rollback_modal_spec.js
@@ -73,7 +73,7 @@ describe('Confirm Rollback Modal Component', () => {
hasMultipleCommits,
retryUrl,
});
- const modal = component.find(GlModal);
+ const modal = component.findComponent(GlModal);
expect(modal.attributes('title')).toContain('Rollback');
expect(modal.attributes('title')).toContain('test');
@@ -92,7 +92,7 @@ describe('Confirm Rollback Modal Component', () => {
hasMultipleCommits,
});
- const modal = component.find(GlModal);
+ const modal = component.findComponent(GlModal);
expect(modal.attributes('title')).toContain('Re-deploy');
expect(modal.attributes('title')).toContain('test');
@@ -110,7 +110,7 @@ describe('Confirm Rollback Modal Component', () => {
});
const eventHubSpy = jest.spyOn(eventHub, '$emit');
- const modal = component.find(GlModal);
+ const modal = component.findComponent(GlModal);
modal.vm.$emit('ok');
expect(eventHubSpy).toHaveBeenCalledWith('rollbackEnvironment', env);
@@ -155,7 +155,7 @@ describe('Confirm Rollback Modal Component', () => {
},
{ apolloProvider },
);
- const modal = component.find(GlModal);
+ const modal = component.findComponent(GlModal);
expect(trimText(modal.text())).toContain('commit abc0123');
expect(modal.text()).toContain('Are you sure you want to continue?');
@@ -177,7 +177,7 @@ describe('Confirm Rollback Modal Component', () => {
},
{ apolloProvider },
);
- const modal = component.find(GlModal);
+ const modal = component.findComponent(GlModal);
expect(modal.attributes('title')).toContain('Rollback');
expect(modal.attributes('title')).toContain('test');
@@ -201,7 +201,7 @@ describe('Confirm Rollback Modal Component', () => {
{ apolloProvider },
);
- const modal = component.find(GlModal);
+ const modal = component.findComponent(GlModal);
expect(modal.attributes('title')).toContain('Re-deploy');
expect(modal.attributes('title')).toContain('test');
@@ -220,7 +220,7 @@ describe('Confirm Rollback Modal Component', () => {
{ apolloProvider },
);
- const modal = component.find(GlModal);
+ const modal = component.findComponent(GlModal);
modal.vm.$emit('ok');
await nextTick();
diff --git a/spec/frontend/environments/deploy_board_component_spec.js b/spec/frontend/environments/deploy_board_component_spec.js
index 4d63648dd48..c005ca22070 100644
--- a/spec/frontend/environments/deploy_board_component_spec.js
+++ b/spec/frontend/environments/deploy_board_component_spec.js
@@ -26,7 +26,9 @@ describe('Deploy Board', () => {
});
it('should render percentage with completion value provided', () => {
- expect(wrapper.find({ ref: 'percentage' }).text()).toBe(`${deployBoardMockData.completion}%`);
+ expect(wrapper.findComponent({ ref: 'percentage' }).text()).toBe(
+ `${deployBoardMockData.completion}%`,
+ );
});
it('should render total instance count', () => {
@@ -79,7 +81,9 @@ describe('Deploy Board', () => {
});
it('should render percentage with completion value provided', () => {
- expect(wrapper.find({ ref: 'percentage' }).text()).toBe(`${rolloutStatus.completion}%`);
+ expect(wrapper.findComponent({ ref: 'percentage' }).text()).toBe(
+ `${rolloutStatus.completion}%`,
+ );
});
it('should render total instance count', () => {
diff --git a/spec/frontend/environments/edit_environment_spec.js b/spec/frontend/environments/edit_environment_spec.js
index 2c8c054ccbd..0f2d6e95bf0 100644
--- a/spec/frontend/environments/edit_environment_spec.js
+++ b/spec/frontend/environments/edit_environment_spec.js
@@ -42,7 +42,7 @@ describe('~/environments/components/edit.vue', () => {
const findExternalUrlInput = () => wrapper.findByLabelText('External URL');
const findForm = () => wrapper.findByRole('form', { name: 'Edit environment' });
- const showsLoading = () => wrapper.find(GlLoadingIcon).exists();
+ const showsLoading = () => wrapper.findComponent(GlLoadingIcon).exists();
const submitForm = async (expected, response) => {
mock
diff --git a/spec/frontend/environments/environment_actions_spec.js b/spec/frontend/environments/environment_actions_spec.js
index ada79e2d415..68895b194a1 100644
--- a/spec/frontend/environments/environment_actions_spec.js
+++ b/spec/frontend/environments/environment_actions_spec.js
@@ -51,7 +51,7 @@ describe('EnvironmentActions Component', () => {
}
const findDropdownItem = (action) => {
- const buttons = wrapper.findAll(GlDropdownItem);
+ const buttons = wrapper.findAllComponents(GlDropdownItem);
return buttons.filter((button) => button.text().startsWith(action.name)).at(0);
};
@@ -62,12 +62,12 @@ describe('EnvironmentActions Component', () => {
it('should render a dropdown button with 2 icons', () => {
createComponent({}, { mountFn: mount });
- expect(wrapper.find(GlDropdown).findAll(GlIcon).length).toBe(2);
+ expect(wrapper.findComponent(GlDropdown).findAllComponents(GlIcon).length).toBe(2);
});
it('should render a dropdown button with aria-label description', () => {
createComponent();
- expect(wrapper.find(GlDropdown).attributes('aria-label')).toBe('Deploy to...');
+ expect(wrapper.findComponent(GlDropdown).attributes('aria-label')).toBe('Deploy to...');
});
it('should render a tooltip', () => {
@@ -98,11 +98,11 @@ describe('EnvironmentActions Component', () => {
});
it('should render a dropdown with the provided list of actions', () => {
- expect(wrapper.findAll(GlDropdownItem)).toHaveLength(actions.length);
+ expect(wrapper.findAllComponents(GlDropdownItem)).toHaveLength(actions.length);
});
it("should render a disabled action when it's not playable", () => {
- const dropdownItems = wrapper.findAll(GlDropdownItem);
+ const dropdownItems = wrapper.findAllComponents(GlDropdownItem);
const lastDropdownItem = dropdownItems.at(dropdownItems.length - 1);
expect(lastDropdownItem.attributes('disabled')).toBe('true');
});
@@ -136,7 +136,7 @@ describe('EnvironmentActions Component', () => {
});
it('should render a dropdown button with a loading icon', () => {
- expect(wrapper.find(GlLoadingIcon).isVisible()).toBe(true);
+ expect(wrapper.findComponent(GlLoadingIcon).isVisible()).toBe(true);
});
});
diff --git a/spec/frontend/environments/environment_delete_spec.js b/spec/frontend/environments/environment_delete_spec.js
index 057cb9858c4..530f9f55088 100644
--- a/spec/frontend/environments/environment_delete_spec.js
+++ b/spec/frontend/environments/environment_delete_spec.js
@@ -21,7 +21,7 @@ describe('External URL Component', () => {
});
};
- const findDropdownItem = () => wrapper.find(GlDropdownItem);
+ const findDropdownItem = () => wrapper.findComponent(GlDropdownItem);
describe('event hub', () => {
beforeEach(() => {
diff --git a/spec/frontend/environments/environment_item_spec.js b/spec/frontend/environments/environment_item_spec.js
index 1c86a66d9b8..dd909cf4473 100644
--- a/spec/frontend/environments/environment_item_spec.js
+++ b/spec/frontend/environments/environment_item_spec.js
@@ -88,11 +88,11 @@ describe('Environment item', () => {
it('should render user avatar with link to profile', () => {
const avatarLink = findLastDeploymentAvatarLink();
const avatar = findLastDeploymentAvatar();
- const { username, avatar_url, web_url } = environment.last_deployment.user;
+ const { username, avatar_url: src, web_url } = environment.last_deployment.user;
expect(avatarLink.attributes('href')).toBe(web_url);
expect(avatar.props()).toMatchObject({
- src: avatar_url,
+ src,
entityName: username,
});
expect(avatar.attributes()).toMatchObject({
@@ -127,12 +127,12 @@ describe('Environment item', () => {
it('should render the build ID and user', () => {
const avatarLink = findUpcomingDeploymentAvatarLink();
const avatar = findUpcomingDeploymentAvatar();
- const { username, avatar_url, web_url } = environment.upcoming_deployment.user;
+ const { username, avatar_url: src, web_url } = environment.upcoming_deployment.user;
expect(findUpcomingDeploymentContent().text()).toMatchInterpolatedText('#27 by');
expect(avatarLink.attributes('href')).toBe(web_url);
expect(avatar.props()).toMatchObject({
- src: avatar_url,
+ src,
entityName: username,
});
});
@@ -166,12 +166,12 @@ describe('Environment item', () => {
it('should still render the build ID and user avatar', () => {
const avatarLink = findUpcomingDeploymentAvatarLink();
const avatar = findUpcomingDeploymentAvatar();
- const { username, avatar_url, web_url } = environment.upcoming_deployment.user;
+ const { username, avatar_url: src, web_url } = environment.upcoming_deployment.user;
expect(findUpcomingDeploymentContent().text()).toMatchInterpolatedText('#27 by');
expect(avatarLink.attributes('href')).toBe(web_url);
expect(avatar.props()).toMatchObject({
- src: avatar_url,
+ src,
entityName: username,
});
});
diff --git a/spec/frontend/environments/environment_pin_spec.js b/spec/frontend/environments/environment_pin_spec.js
index 669c974ea4f..170036b5b00 100644
--- a/spec/frontend/environments/environment_pin_spec.js
+++ b/spec/frontend/environments/environment_pin_spec.js
@@ -41,7 +41,7 @@ describe('Pin Component', () => {
it('should emit onPinClick when clicked', () => {
const eventHubSpy = jest.spyOn(eventHub, '$emit');
- const item = wrapper.find(GlDropdownItem);
+ const item = wrapper.findComponent(GlDropdownItem);
item.vm.$emit('click');
@@ -74,7 +74,7 @@ describe('Pin Component', () => {
it('should emit onPinClick when clicked', () => {
jest.spyOn(mockApollo.defaultClient, 'mutate');
- const item = wrapper.find(GlDropdownItem);
+ const item = wrapper.findComponent(GlDropdownItem);
item.vm.$emit('click');
diff --git a/spec/frontend/environments/environment_rollback_spec.js b/spec/frontend/environments/environment_rollback_spec.js
index 7eff46baaf7..be61c6fcc90 100644
--- a/spec/frontend/environments/environment_rollback_spec.js
+++ b/spec/frontend/environments/environment_rollback_spec.js
@@ -44,7 +44,7 @@ describe('Rollback Component', () => {
},
},
});
- const button = wrapper.find(GlDropdownItem);
+ const button = wrapper.findComponent(GlDropdownItem);
button.vm.$emit('click');
@@ -71,7 +71,7 @@ describe('Rollback Component', () => {
},
apolloProvider,
});
- const button = wrapper.find(GlDropdownItem);
+ const button = wrapper.findComponent(GlDropdownItem);
button.vm.$emit('click');
expect(apolloProvider.defaultClient.mutate).toHaveBeenCalledWith({
diff --git a/spec/frontend/environments/environment_stop_spec.js b/spec/frontend/environments/environment_stop_spec.js
index 358abca2f77..851e24c22cc 100644
--- a/spec/frontend/environments/environment_stop_spec.js
+++ b/spec/frontend/environments/environment_stop_spec.js
@@ -22,7 +22,7 @@ describe('Stop Component', () => {
});
};
- const findButton = () => wrapper.find(GlButton);
+ const findButton = () => wrapper.findComponent(GlButton);
describe('eventHub', () => {
beforeEach(() => {
diff --git a/spec/frontend/environments/environment_table_spec.js b/spec/frontend/environments/environment_table_spec.js
index aff6b1327f0..49a643aaac8 100644
--- a/spec/frontend/environments/environment_table_spec.js
+++ b/spec/frontend/environments/environment_table_spec.js
@@ -177,10 +177,10 @@ describe('Environment table', () => {
},
});
- wrapper.find(DeployBoard).vm.$emit('changeCanaryWeight', 40);
+ wrapper.findComponent(DeployBoard).vm.$emit('changeCanaryWeight', 40);
await nextTick();
- expect(wrapper.find(CanaryUpdateModal).props()).toMatchObject({
+ expect(wrapper.findComponent(CanaryUpdateModal).props()).toMatchObject({
weight: 40,
environment: mockItem,
});
diff --git a/spec/frontend/environments/environments_detail_header_spec.js b/spec/frontend/environments/environments_detail_header_spec.js
index 305e7385b43..4687119127d 100644
--- a/spec/frontend/environments/environments_detail_header_spec.js
+++ b/spec/frontend/environments/environments_detail_header_spec.js
@@ -1,5 +1,6 @@
import { GlSprintf } from '@gitlab/ui';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
import DeleteEnvironmentModal from '~/environments/components/delete_environment_modal.vue';
import EnvironmentsDetailHeader from '~/environments/components/environments_detail_header.vue';
import StopEnvironmentModal from '~/environments/components/stop_environment_modal.vue';
@@ -43,6 +44,9 @@ describe('Environments detail header component', () => {
GlSprintf,
TimeAgo,
},
+ directives: {
+ GlTooltip: createMockDirective(),
+ },
propsData: {
canAdminEnvironment: false,
canUpdateEnvironment: false,
@@ -185,6 +189,14 @@ describe('Environments detail header component', () => {
it('displays the metrics button with correct path', () => {
expect(findMetricsButton().attributes('href')).toBe(metricsPath);
});
+
+ it('uses a gl tooltip for the title', () => {
+ const button = findMetricsButton();
+ const tooltip = getBinding(button.element, 'gl-tooltip');
+
+ expect(tooltip).toBeDefined();
+ expect(button.attributes('title')).toBe('See metrics');
+ });
});
describe('when has all admin rights', () => {
diff --git a/spec/frontend/environments/folder/environments_folder_view_spec.js b/spec/frontend/environments/folder/environments_folder_view_spec.js
index 9eb57b2682f..f8b8465cf6f 100644
--- a/spec/frontend/environments/folder/environments_folder_view_spec.js
+++ b/spec/frontend/environments/folder/environments_folder_view_spec.js
@@ -65,7 +65,7 @@ describe('Environments Folder View', () => {
});
it('should render a table with environments', () => {
- const table = wrapper.find(EnvironmentTable);
+ const table = wrapper.findComponent(EnvironmentTable);
expect(table.exists()).toBe(true);
expect(table.find('.environment-name').text()).toEqual(environmentsList[0].name);
@@ -93,7 +93,7 @@ describe('Environments Folder View', () => {
describe('pagination', () => {
it('should render pagination', () => {
- expect(wrapper.find(GlPagination).exists()).toBe(true);
+ expect(wrapper.findComponent(GlPagination).exists()).toBe(true);
});
it('should make an API request when changing page', () => {
@@ -126,7 +126,7 @@ describe('Environments Folder View', () => {
});
it('should not render a table', () => {
- expect(wrapper.find(EnvironmentTable).exists()).toBe(false);
+ expect(wrapper.findComponent(EnvironmentTable).exists()).toBe(false);
});
it('should render available tab with count 0', () => {
diff --git a/spec/frontend/environments/new_environment_spec.js b/spec/frontend/environments/new_environment_spec.js
index f6d970e02d8..5a1c1c7714c 100644
--- a/spec/frontend/environments/new_environment_spec.js
+++ b/spec/frontend/environments/new_environment_spec.js
@@ -40,7 +40,7 @@ describe('~/environments/components/new.vue', () => {
wrapper.destroy();
});
- const showsLoading = () => wrapper.find(GlLoadingIcon).exists();
+ const showsLoading = () => wrapper.findComponent(GlLoadingIcon).exists();
const submitForm = async (expected, response) => {
mock
diff --git a/spec/frontend/error_tracking/components/error_details_spec.js b/spec/frontend/error_tracking/components/error_details_spec.js
index 4273da6c735..732eff65495 100644
--- a/spec/frontend/error_tracking/components/error_details_spec.js
+++ b/spec/frontend/error_tracking/components/error_details_spec.js
@@ -35,7 +35,9 @@ describe('ErrorDetails', () => {
const externalUrl = 'https://sentry.io/organizations/test-sentry-nk/issues/1/?project=1';
const findInput = (name) => {
- const inputs = wrapper.findAll(GlFormInput).filter((c) => c.attributes('name') === name);
+ const inputs = wrapper
+ .findAllComponents(GlFormInput)
+ .filter((c) => c.attributes('name') === name);
return inputs.length ? inputs.at(0) : inputs;
};
@@ -44,7 +46,7 @@ describe('ErrorDetails', () => {
const findUpdateResolveStatusButton = () =>
wrapper.find('[data-testid="update-resolve-status-btn"]');
const findExternalUrl = () => wrapper.find('[data-testid="external-url-link"]');
- const findAlert = () => wrapper.find(GlAlert);
+ const findAlert = () => wrapper.findComponent(GlAlert);
function mountComponent() {
wrapper = shallowMount(ErrorDetails, {
@@ -119,9 +121,9 @@ describe('ErrorDetails', () => {
});
it('should show spinner while loading', () => {
- expect(wrapper.find(GlLoadingIcon).exists()).toBe(true);
- expect(wrapper.find(GlLink).exists()).toBe(false);
- expect(wrapper.find(Stacktrace).exists()).toBe(false);
+ expect(wrapper.findComponent(GlLoadingIcon).exists()).toBe(true);
+ expect(wrapper.findComponent(GlLink).exists()).toBe(false);
+ expect(wrapper.findComponent(Stacktrace).exists()).toBe(false);
});
});
@@ -141,7 +143,7 @@ describe('ErrorDetails', () => {
wrapper.vm.onNoApolloResult();
await nextTick();
- expect(wrapper.find(GlLoadingIcon).exists()).toBe(true);
+ expect(wrapper.findComponent(GlLoadingIcon).exists()).toBe(true);
expect(createFlash).not.toHaveBeenCalled();
expect(mocks.$apollo.queries.error.stopPolling).not.toHaveBeenCalled();
});
@@ -152,8 +154,8 @@ describe('ErrorDetails', () => {
wrapper.vm.onNoApolloResult();
await nextTick();
- expect(wrapper.find(GlLoadingIcon).exists()).toBe(false);
- expect(wrapper.find(GlLink).exists()).toBe(false);
+ expect(wrapper.findComponent(GlLoadingIcon).exists()).toBe(false);
+ expect(wrapper.findComponent(GlLink).exists()).toBe(false);
expect(createFlash).toHaveBeenCalledWith({
message: 'Could not connect to Sentry. Refresh the page to try again.',
type: 'warning',
@@ -186,11 +188,11 @@ describe('ErrorDetails', () => {
});
it('should show Sentry error details without stacktrace', () => {
- expect(wrapper.find(GlLink).exists()).toBe(true);
- expect(wrapper.find(GlLoadingIcon).exists()).toBe(true);
- expect(wrapper.find(Stacktrace).exists()).toBe(false);
- expect(wrapper.find(GlBadge).exists()).toBe(false);
- expect(wrapper.findAll(GlButton)).toHaveLength(3);
+ expect(wrapper.findComponent(GlLink).exists()).toBe(true);
+ expect(wrapper.findComponent(GlLoadingIcon).exists()).toBe(true);
+ expect(wrapper.findComponent(Stacktrace).exists()).toBe(false);
+ expect(wrapper.findComponent(GlBadge).exists()).toBe(false);
+ expect(wrapper.findAllComponents(GlButton)).toHaveLength(3);
});
describe('unsafe chars for culprit field', () => {
@@ -227,7 +229,7 @@ describe('ErrorDetails', () => {
},
});
await nextTick();
- expect(wrapper.findAll(GlBadge).length).toBe(2);
+ expect(wrapper.findAllComponents(GlBadge).length).toBe(2);
});
it('should NOT show the badge if the tag is not present', async () => {
@@ -239,7 +241,7 @@ describe('ErrorDetails', () => {
},
});
await nextTick();
- expect(wrapper.findAll(GlBadge).length).toBe(1);
+ expect(wrapper.findAllComponents(GlBadge).length).toBe(1);
});
it.each(Object.keys(severityLevel))(
@@ -253,7 +255,7 @@ describe('ErrorDetails', () => {
},
});
await nextTick();
- expect(wrapper.find(GlBadge).props('variant')).toEqual(
+ expect(wrapper.findComponent(GlBadge).props('variant')).toEqual(
severityLevelVariant[severityLevel[level]],
);
},
@@ -268,7 +270,7 @@ describe('ErrorDetails', () => {
},
});
await nextTick();
- expect(wrapper.find(GlBadge).props('variant')).toEqual(
+ expect(wrapper.findComponent(GlBadge).props('variant')).toEqual(
severityLevelVariant[severityLevel.ERROR],
);
});
@@ -278,8 +280,8 @@ describe('ErrorDetails', () => {
it('should show stacktrace', async () => {
store.state.details.loadingStacktrace = false;
await nextTick();
- expect(wrapper.find(GlLoadingIcon).exists()).toBe(false);
- expect(wrapper.find(Stacktrace).exists()).toBe(true);
+ expect(wrapper.findComponent(GlLoadingIcon).exists()).toBe(false);
+ expect(wrapper.findComponent(Stacktrace).exists()).toBe(true);
expect(findAlert().exists()).toBe(false);
});
@@ -287,8 +289,8 @@ describe('ErrorDetails', () => {
store.state.details.loadingStacktrace = false;
store.getters = { 'details/sentryUrl': () => 'sentry.io', 'details/stacktrace': () => [] };
await nextTick();
- expect(wrapper.find(GlLoadingIcon).exists()).toBe(false);
- expect(wrapper.find(Stacktrace).exists()).toBe(false);
+ expect(wrapper.findComponent(GlLoadingIcon).exists()).toBe(false);
+ expect(wrapper.findComponent(Stacktrace).exists()).toBe(false);
expect(findAlert().text()).toBe('No stack trace for this error');
});
});
diff --git a/spec/frontend/error_tracking/components/error_tracking_actions_spec.js b/spec/frontend/error_tracking/components/error_tracking_actions_spec.js
index 7ed4e5f6b05..5f6c9ddb4d7 100644
--- a/spec/frontend/error_tracking/components/error_tracking_actions_spec.js
+++ b/spec/frontend/error_tracking/components/error_tracking_actions_spec.js
@@ -35,7 +35,7 @@ describe('Error Tracking Actions', () => {
}
});
- const findButtons = () => wrapper.findAll(GlButton);
+ const findButtons = () => wrapper.findAllComponents(GlButton);
describe('when error status is unresolved', () => {
it('renders the correct actions buttons to allow ignore and resolve', async () => {
diff --git a/spec/frontend/error_tracking/components/error_tracking_list_spec.js b/spec/frontend/error_tracking/components/error_tracking_list_spec.js
index 23d448f3964..b7dffbbec04 100644
--- a/spec/frontend/error_tracking/components/error_tracking_list_spec.js
+++ b/spec/frontend/error_tracking/components/error_tracking_list_spec.js
@@ -19,13 +19,13 @@ describe('ErrorTrackingList', () => {
const findErrorListTable = () => wrapper.find('table');
const findErrorListRows = () => wrapper.findAll('tbody tr');
- const dropdownsArray = () => wrapper.findAll(GlDropdown);
- const findRecentSearchesDropdown = () => dropdownsArray().at(0).find(GlDropdown);
- const findStatusFilterDropdown = () => dropdownsArray().at(1).find(GlDropdown);
- const findSortDropdown = () => dropdownsArray().at(2).find(GlDropdown);
- const findLoadingIcon = () => wrapper.find(GlLoadingIcon);
- const findPagination = () => wrapper.find(GlPagination);
- const findErrorActions = () => wrapper.find(ErrorTrackingActions);
+ const dropdownsArray = () => wrapper.findAllComponents(GlDropdown);
+ const findRecentSearchesDropdown = () => dropdownsArray().at(0).findComponent(GlDropdown);
+ const findStatusFilterDropdown = () => dropdownsArray().at(1).findComponent(GlDropdown);
+ const findSortDropdown = () => dropdownsArray().at(2).findComponent(GlDropdown);
+ const findLoadingIcon = () => wrapper.findComponent(GlLoadingIcon);
+ const findPagination = () => wrapper.findComponent(GlPagination);
+ const findErrorActions = () => wrapper.findComponent(ErrorTrackingActions);
const findIntegratedDisabledAlert = () => wrapper.findByTestId('integrated-disabled-alert');
function mountComponent({
@@ -152,12 +152,12 @@ describe('ErrorTrackingList', () => {
it('each error in the list should have an action button set', () => {
findErrorListRows().wrappers.forEach((row) => {
- expect(row.find(ErrorTrackingActions).exists()).toBe(true);
+ expect(row.findComponent(ErrorTrackingActions).exists()).toBe(true);
});
});
describe('filtering', () => {
- const findSearchBox = () => wrapper.find(GlFormInput);
+ const findSearchBox = () => wrapper.findComponent(GlFormInput);
it('shows search box & sort dropdown', () => {
expect(findSearchBox().exists()).toBe(true);
@@ -222,7 +222,7 @@ describe('ErrorTrackingList', () => {
});
it('shows empty state', () => {
- expect(wrapper.find(GlEmptyState).exists()).toBe(true);
+ expect(wrapper.findComponent(GlEmptyState).exists()).toBe(true);
expect(findLoadingIcon().exists()).toBe(false);
expect(findErrorListTable().exists()).toBe(false);
expect(dropdownsArray().length).toBe(0);
@@ -327,7 +327,7 @@ describe('ErrorTrackingList', () => {
});
it('shows empty state', () => {
- expect(wrapper.find(GlEmptyState).isVisible()).toBe(true);
+ expect(wrapper.findComponent(GlEmptyState).isVisible()).toBe(true);
});
});
@@ -358,7 +358,7 @@ describe('ErrorTrackingList', () => {
});
describe('clear', () => {
- const clearRecentButton = () => wrapper.find({ ref: 'clearRecentSearches' });
+ const clearRecentButton = () => wrapper.findComponent({ ref: 'clearRecentSearches' });
it('is hidden when list empty', () => {
store.state.list.recentSearches = [];
diff --git a/spec/frontend/error_tracking/components/stacktrace_entry_spec.js b/spec/frontend/error_tracking/components/stacktrace_entry_spec.js
index 0b43167c19b..693fcff50ca 100644
--- a/spec/frontend/error_tracking/components/stacktrace_entry_spec.js
+++ b/spec/frontend/error_tracking/components/stacktrace_entry_spec.js
@@ -36,10 +36,10 @@ describe('Stacktrace Entry', () => {
it('should render stacktrace entry collapsed', () => {
mountComponent({ lines });
- expect(wrapper.find(StackTraceEntry).exists()).toBe(true);
- expect(wrapper.find(ClipboardButton).exists()).toBe(true);
- expect(wrapper.find(GlIcon).exists()).toBe(true);
- expect(wrapper.find(FileIcon).exists()).toBe(true);
+ expect(wrapper.findComponent(StackTraceEntry).exists()).toBe(true);
+ expect(wrapper.findComponent(ClipboardButton).exists()).toBe(true);
+ expect(wrapper.findComponent(GlIcon).exists()).toBe(true);
+ expect(wrapper.findComponent(FileIcon).exists()).toBe(true);
expect(wrapper.find('table').exists()).toBe(false);
});
@@ -56,7 +56,7 @@ describe('Stacktrace Entry', () => {
it('should hide collapse icon and render error fn name and error line when there is no code block', () => {
const extraInfo = { errorLine: 34, errorFn: 'errorFn', errorColumn: 77 };
mountComponent({ expanded: false, lines: [], ...extraInfo });
- expect(wrapper.find(GlIcon).exists()).toBe(false);
+ expect(wrapper.findComponent(GlIcon).exists()).toBe(false);
expect(trimText(findFileHeaderContent())).toContain(
`in ${extraInfo.errorFn} at line ${extraInfo.errorLine}:${extraInfo.errorColumn}`,
);
diff --git a/spec/frontend/error_tracking/components/stacktrace_spec.js b/spec/frontend/error_tracking/components/stacktrace_spec.js
index 4f4a60acba4..cd5a57f5683 100644
--- a/spec/frontend/error_tracking/components/stacktrace_spec.js
+++ b/spec/frontend/error_tracking/components/stacktrace_spec.js
@@ -33,13 +33,13 @@ describe('ErrorDetails', () => {
it('should render single Stacktrace entry', () => {
mountComponent([stackTraceEntry]);
- expect(wrapper.findAll(StackTraceEntry).length).toBe(1);
+ expect(wrapper.findAllComponents(StackTraceEntry).length).toBe(1);
});
it('should render multiple Stacktrace entry', () => {
const entriesNum = 3;
mountComponent(new Array(entriesNum).fill(stackTraceEntry));
- expect(wrapper.findAll(StackTraceEntry).length).toBe(entriesNum);
+ expect(wrapper.findAllComponents(StackTraceEntry).length).toBe(entriesNum);
});
});
});
diff --git a/spec/frontend/error_tracking_settings/components/project_dropdown_spec.js b/spec/frontend/error_tracking_settings/components/project_dropdown_spec.js
index 1ba5a505f57..b44af547658 100644
--- a/spec/frontend/error_tracking_settings/components/project_dropdown_spec.js
+++ b/spec/frontend/error_tracking_settings/components/project_dropdown_spec.js
@@ -41,23 +41,23 @@ describe('error tracking settings project dropdown', () => {
describe('empty project list', () => {
it('renders the dropdown', () => {
- expect(wrapper.find('#project-dropdown').exists()).toBeTruthy();
- expect(wrapper.find(GlDropdown).exists()).toBeTruthy();
+ expect(wrapper.find('#project-dropdown').exists()).toBe(true);
+ expect(wrapper.find(GlDropdown).exists()).toBe(true);
});
it('shows helper text', () => {
- expect(wrapper.find('.js-project-dropdown-label').exists()).toBeTruthy();
+ expect(wrapper.find('.js-project-dropdown-label').exists()).toBe(true);
expect(wrapper.find('.js-project-dropdown-label').text()).toContain(
'To enable project selection',
);
});
it('does not show an error', () => {
- expect(wrapper.find('.js-project-dropdown-error').exists()).toBeFalsy();
+ expect(wrapper.find('.js-project-dropdown-error').exists()).toBe(false);
});
it('does not contain any dropdown items', () => {
- expect(wrapper.find(GlDropdownItem).exists()).toBeFalsy();
+ expect(wrapper.find(GlDropdownItem).exists()).toBe(false);
expect(wrapper.find(GlDropdown).props('text')).toBe('No projects available');
});
});
@@ -70,12 +70,12 @@ describe('error tracking settings project dropdown', () => {
});
it('renders the dropdown', () => {
- expect(wrapper.find('#project-dropdown').exists()).toBeTruthy();
- expect(wrapper.find(GlDropdown).exists()).toBeTruthy();
+ expect(wrapper.find('#project-dropdown').exists()).toBe(true);
+ expect(wrapper.find(GlDropdown).exists()).toBe(true);
});
it('contains a number of dropdown items', () => {
- expect(wrapper.find(GlDropdownItem).exists()).toBeTruthy();
+ expect(wrapper.find(GlDropdownItem).exists()).toBe(true);
expect(wrapper.findAll(GlDropdownItem).length).toBe(2);
});
});
@@ -89,8 +89,8 @@ describe('error tracking settings project dropdown', () => {
});
it('does not show helper text', () => {
- expect(wrapper.find('.js-project-dropdown-label').exists()).toBeFalsy();
- expect(wrapper.find('.js-project-dropdown-error').exists()).toBeFalsy();
+ expect(wrapper.find('.js-project-dropdown-label').exists()).toBe(false);
+ expect(wrapper.find('.js-project-dropdown-error').exists()).toBe(false);
});
});
@@ -105,8 +105,8 @@ describe('error tracking settings project dropdown', () => {
});
it('displays a error', () => {
- expect(wrapper.find('.js-project-dropdown-label').exists()).toBeFalsy();
- expect(wrapper.find('.js-project-dropdown-error').exists()).toBeTruthy();
+ expect(wrapper.find('.js-project-dropdown-label').exists()).toBe(false);
+ expect(wrapper.find('.js-project-dropdown-error').exists()).toBe(true);
});
});
});
diff --git a/spec/frontend/feature_flags/components/configure_feature_flags_modal_spec.js b/spec/frontend/feature_flags/components/configure_feature_flags_modal_spec.js
index 4a0242b4a46..c1051a14a08 100644
--- a/spec/frontend/feature_flags/components/configure_feature_flags_modal_spec.js
+++ b/spec/frontend/feature_flags/components/configure_feature_flags_modal_spec.js
@@ -39,7 +39,7 @@ describe('Configure Feature Flags Modal', () => {
const findSecondaryAction = () => findGlModal().props('actionSecondary');
const findProjectNameInput = () => wrapper.find('#project_name_verification');
const findDangerGlAlert = () =>
- wrapper.findAll(GlAlert).filter((c) => c.props('variant') === 'danger');
+ wrapper.findAllComponents(GlAlert).filter((c) => c.props('variant') === 'danger');
describe('idle', () => {
afterEach(() => wrapper.destroy());
@@ -157,7 +157,7 @@ describe('Configure Feature Flags Modal', () => {
beforeEach(factory.bind(null, { isRotating: true }));
it('should disable the project name input', async () => {
- expect(findProjectNameInput().attributes('disabled')).toBeTruthy();
+ expect(findProjectNameInput().attributes('disabled')).toBe('true');
});
});
});
diff --git a/spec/frontend/feature_flags/components/empty_state_spec.js b/spec/frontend/feature_flags/components/empty_state_spec.js
index 4ac82ae44a6..e3cc6f703c4 100644
--- a/spec/frontend/feature_flags/components/empty_state_spec.js
+++ b/spec/frontend/feature_flags/components/empty_state_spec.js
@@ -57,7 +57,7 @@ describe('feature_flags/components/feature_flags_tab.vue', () => {
beforeEach(() => {
wrapper = factory();
- alerts = wrapper.findAll(GlAlert);
+ alerts = wrapper.findAllComponents(GlAlert);
});
it('should show any alerts', () => {
@@ -68,7 +68,7 @@ describe('feature_flags/components/feature_flags_tab.vue', () => {
it('should emit a dismiss event for a dismissed alert', () => {
alerts.at(0).vm.$emit('dismiss');
- expect(wrapper.find(EmptyState).emitted('dismissAlert')).toEqual([[0]]);
+ expect(wrapper.findComponent(EmptyState).emitted('dismissAlert')).toEqual([[0]]);
});
});
@@ -78,8 +78,8 @@ describe('feature_flags/components/feature_flags_tab.vue', () => {
});
it('should show a loading icon and nothing else', () => {
- expect(wrapper.find(GlLoadingIcon).exists()).toBe(true);
- expect(wrapper.findAll(GlEmptyState)).toHaveLength(0);
+ expect(wrapper.findComponent(GlLoadingIcon).exists()).toBe(true);
+ expect(wrapper.findAllComponents(GlEmptyState)).toHaveLength(0);
});
});
@@ -88,7 +88,7 @@ describe('feature_flags/components/feature_flags_tab.vue', () => {
beforeEach(() => {
wrapper = factory({ errorState: true });
- emptyState = wrapper.find(GlEmptyState);
+ emptyState = wrapper.findComponent(GlEmptyState);
});
it('should show an error state if there has been an error', () => {
@@ -106,8 +106,8 @@ describe('feature_flags/components/feature_flags_tab.vue', () => {
beforeEach(() => {
wrapper = factory({ emptyState: true });
- emptyState = wrapper.find(GlEmptyState);
- emptyStateLink = emptyState.find(GlLink);
+ emptyState = wrapper.findComponent(GlEmptyState);
+ emptyStateLink = emptyState.findComponent(GlLink);
});
it('should show an empty state if it is empty', () => {
diff --git a/spec/frontend/feature_flags/components/environments_dropdown_spec.js b/spec/frontend/feature_flags/components/environments_dropdown_spec.js
index cca472012e9..e8103df78bc 100644
--- a/spec/frontend/feature_flags/components/environments_dropdown_spec.js
+++ b/spec/frontend/feature_flags/components/environments_dropdown_spec.js
@@ -23,7 +23,7 @@ describe('Feature flags > Environments dropdown ', () => {
});
};
- const findEnvironmentSearchInput = () => wrapper.find(GlSearchBoxByType);
+ const findEnvironmentSearchInput = () => wrapper.findComponent(GlSearchBoxByType);
const findDropdownMenu = () => wrapper.find('.dropdown-menu');
afterEach(() => {
@@ -91,7 +91,7 @@ describe('Feature flags > Environments dropdown ', () => {
describe('with received data', () => {
it('sets is loading to false', () => {
expect(wrapper.vm.isLoading).toBe(false);
- expect(wrapper.find(GlLoadingIcon).exists()).toBe(false);
+ expect(wrapper.findComponent(GlLoadingIcon).exists()).toBe(false);
});
it('shows the suggestions', () => {
@@ -100,7 +100,7 @@ describe('Feature flags > Environments dropdown ', () => {
it('emits event when a suggestion is clicked', async () => {
const button = wrapper
- .findAll(GlButton)
+ .findAllComponents(GlButton)
.filter((b) => b.text() === 'production')
.at(0);
button.vm.$emit('click');
@@ -111,7 +111,7 @@ describe('Feature flags > Environments dropdown ', () => {
describe('on click clear button', () => {
beforeEach(async () => {
- wrapper.find(GlButton).vm.$emit('click');
+ wrapper.findComponent(GlButton).vm.$emit('click');
await nextTick();
});
@@ -137,7 +137,7 @@ describe('Feature flags > Environments dropdown ', () => {
});
it('emits create event', async () => {
- wrapper.findAll(GlButton).at(0).vm.$emit('click');
+ wrapper.findAllComponents(GlButton).at(0).vm.$emit('click');
await nextTick();
expect(wrapper.emitted('createClicked')).toEqual([['production']]);
});
diff --git a/spec/frontend/feature_flags/components/feature_flags_table_spec.js b/spec/frontend/feature_flags/components/feature_flags_table_spec.js
index 99864a95f59..47f12f70056 100644
--- a/spec/frontend/feature_flags/components/feature_flags_table_spec.js
+++ b/spec/frontend/feature_flags/components/feature_flags_table_spec.js
@@ -119,7 +119,7 @@ describe('Feature flag table', () => {
it('should render an environments specs badge with active class', () => {
const envColumn = wrapper.find('.js-feature-flag-environments');
- expect(trimText(envColumn.find(GlBadge).text())).toBe('All Users: All Environments');
+ expect(trimText(envColumn.findComponent(GlBadge).text())).toBe('All Users: All Environments');
});
it('should render an actions column', () => {
@@ -137,7 +137,7 @@ describe('Feature flag table', () => {
beforeEach(() => {
props.featureFlags[0].update_path = props.featureFlags[0].destroy_path;
createWrapper(props);
- toggle = wrapper.find(GlToggle);
+ toggle = wrapper.findComponent(GlToggle);
spy = mockTracking('_category_', toggle.element, jest.spyOn);
});
diff --git a/spec/frontend/feature_flags/components/form_spec.js b/spec/frontend/feature_flags/components/form_spec.js
index 3ad1225906b..7dd7c709c94 100644
--- a/spec/frontend/feature_flags/components/form_spec.js
+++ b/spec/frontend/feature_flags/components/form_spec.js
@@ -61,7 +61,7 @@ describe('feature flag form', () => {
it('does not render the related issues widget without the featureFlagIssuesEndpoint', () => {
factory(requiredProps);
- expect(wrapper.find(RelatedIssuesRoot).exists()).toBe(false);
+ expect(wrapper.findComponent(RelatedIssuesRoot).exists()).toBe(false);
});
it('renders the related issues widget when the featureFlagIssuesEndpoint is provided', () => {
@@ -73,7 +73,7 @@ describe('feature flag form', () => {
},
);
- expect(wrapper.find(RelatedIssuesRoot).exists()).toBe(true);
+ expect(wrapper.findComponent(RelatedIssuesRoot).exists()).toBe(true);
});
describe('without provided data', () => {
@@ -114,7 +114,7 @@ describe('feature flag form', () => {
});
it('should show the strategy component', () => {
- const strategy = wrapper.find(Strategy);
+ const strategy = wrapper.findComponent(Strategy);
expect(strategy.exists()).toBe(true);
expect(strategy.props('strategy')).toEqual({
type: ROLLOUT_STRATEGY_PERCENT_ROLLOUT,
@@ -124,14 +124,14 @@ describe('feature flag form', () => {
});
it('should show one strategy component per strategy', () => {
- expect(wrapper.findAll(Strategy)).toHaveLength(2);
+ expect(wrapper.findAllComponents(Strategy)).toHaveLength(2);
});
it('adds an all users strategy when clicking the Add button', async () => {
- wrapper.find(GlButton).vm.$emit('click');
+ wrapper.findComponent(GlButton).vm.$emit('click');
await nextTick();
- const strategies = wrapper.findAll(Strategy);
+ const strategies = wrapper.findAllComponents(Strategy);
expect(strategies).toHaveLength(3);
expect(strategies.at(2).props('strategy')).toEqual(allUsersStrategy);
@@ -143,10 +143,10 @@ describe('feature flag form', () => {
parameters: { percentage: '30' },
scopes: [],
};
- wrapper.find(Strategy).vm.$emit('delete');
+ wrapper.findComponent(Strategy).vm.$emit('delete');
await nextTick();
- expect(wrapper.findAll(Strategy)).toHaveLength(1);
- expect(wrapper.find(Strategy).props('strategy')).not.toEqual(strategy);
+ expect(wrapper.findAllComponents(Strategy)).toHaveLength(1);
+ expect(wrapper.findComponent(Strategy).props('strategy')).not.toEqual(strategy);
});
});
});
diff --git a/spec/frontend/feature_flags/components/new_environments_dropdown_spec.js b/spec/frontend/feature_flags/components/new_environments_dropdown_spec.js
index 63fa5d19982..1c0c444c296 100644
--- a/spec/frontend/feature_flags/components/new_environments_dropdown_spec.js
+++ b/spec/frontend/feature_flags/components/new_environments_dropdown_spec.js
@@ -31,17 +31,17 @@ describe('New Environments Dropdown', () => {
describe('before results', () => {
it('should show a loading icon', () => {
axiosMock.onGet(TEST_HOST).reply(() => {
- expect(wrapper.find(GlLoadingIcon).exists()).toBe(true);
+ expect(wrapper.findComponent(GlLoadingIcon).exists()).toBe(true);
});
- wrapper.find(GlSearchBoxByType).vm.$emit('focus');
+ wrapper.findComponent(GlSearchBoxByType).vm.$emit('focus');
return axios.waitForAll();
});
it('should not show any dropdown items', () => {
axiosMock.onGet(TEST_HOST).reply(() => {
- expect(wrapper.findAll(GlDropdownItem)).toHaveLength(0);
+ expect(wrapper.findAllComponents(GlDropdownItem)).toHaveLength(0);
});
- wrapper.find(GlSearchBoxByType).vm.$emit('focus');
+ wrapper.findComponent(GlSearchBoxByType).vm.$emit('focus');
return axios.waitForAll();
});
});
@@ -50,11 +50,11 @@ describe('New Environments Dropdown', () => {
let item;
beforeEach(async () => {
axiosMock.onGet(TEST_HOST).reply(200, []);
- wrapper.find(GlSearchBoxByType).vm.$emit('focus');
- wrapper.find(GlSearchBoxByType).vm.$emit('input', TEST_SEARCH);
+ wrapper.findComponent(GlSearchBoxByType).vm.$emit('focus');
+ wrapper.findComponent(GlSearchBoxByType).vm.$emit('input', TEST_SEARCH);
await axios.waitForAll();
await nextTick();
- item = wrapper.find(GlDropdownItem);
+ item = wrapper.findComponent(GlDropdownItem);
});
it('should display a Create item label', () => {
@@ -62,7 +62,7 @@ describe('New Environments Dropdown', () => {
});
it('should display that no matching items are found', () => {
- expect(wrapper.find({ ref: 'noResults' }).exists()).toBe(true);
+ expect(wrapper.findComponent({ ref: 'noResults' }).exists()).toBe(true);
});
it('should emit a new scope when selected', () => {
@@ -75,10 +75,10 @@ describe('New Environments Dropdown', () => {
let items;
beforeEach(() => {
axiosMock.onGet(TEST_HOST).reply(httpStatusCodes.OK, ['prod', 'production']);
- wrapper.find(GlSearchBoxByType).vm.$emit('focus');
- wrapper.find(GlSearchBoxByType).vm.$emit('input', 'prod');
+ wrapper.findComponent(GlSearchBoxByType).vm.$emit('focus');
+ wrapper.findComponent(GlSearchBoxByType).vm.$emit('input', 'prod');
return axios.waitForAll().then(() => {
- items = wrapper.findAll(GlDropdownItem);
+ items = wrapper.findAllComponents(GlDropdownItem);
});
});
@@ -97,7 +97,7 @@ describe('New Environments Dropdown', () => {
});
it('should not display a message about no results', () => {
- expect(wrapper.find({ ref: 'noResults' }).exists()).toBe(false);
+ expect(wrapper.findComponent({ ref: 'noResults' }).exists()).toBe(false);
});
});
});
diff --git a/spec/frontend/feature_flags/components/new_feature_flag_spec.js b/spec/frontend/feature_flags/components/new_feature_flag_spec.js
index 688ba54f919..300d0e47082 100644
--- a/spec/frontend/feature_flags/components/new_feature_flag_spec.js
+++ b/spec/frontend/feature_flags/components/new_feature_flag_spec.js
@@ -40,7 +40,7 @@ describe('New feature flag form', () => {
};
const findWarningGlAlert = () =>
- wrapper.findAll(GlAlert).filter((c) => c.props('variant') === 'warning');
+ wrapper.findAllComponents(GlAlert).filter((c) => c.props('variant') === 'warning');
beforeEach(() => {
factory();
@@ -65,11 +65,11 @@ describe('New feature flag form', () => {
});
it('should render feature flag form', () => {
- expect(wrapper.find(Form).exists()).toEqual(true);
+ expect(wrapper.findComponent(Form).exists()).toEqual(true);
});
it('has an all users strategy by default', () => {
- const strategies = wrapper.find(Form).props('strategies');
+ const strategies = wrapper.findComponent(Form).props('strategies');
expect(strategies).toEqual([allUsersStrategy]);
});
diff --git a/spec/frontend/feature_flags/components/strategies/flexible_rollout_spec.js b/spec/frontend/feature_flags/components/strategies/flexible_rollout_spec.js
index 56b14d80ab3..70a9156b5a9 100644
--- a/spec/frontend/feature_flags/components/strategies/flexible_rollout_spec.js
+++ b/spec/frontend/feature_flags/components/strategies/flexible_rollout_spec.js
@@ -34,12 +34,12 @@ describe('feature_flags/components/strategies/flexible_rollout.vue', () => {
percentageFormGroup = wrapper
.find('[data-testid="strategy-flexible-rollout-percentage"]')
- .find(ParameterFormGroup);
- percentageInput = percentageFormGroup.find(GlFormInput);
+ .findComponent(ParameterFormGroup);
+ percentageInput = percentageFormGroup.findComponent(GlFormInput);
stickinessFormGroup = wrapper
.find('[data-testid="strategy-flexible-rollout-stickiness"]')
- .find(ParameterFormGroup);
- stickinessSelect = stickinessFormGroup.find(GlFormSelect);
+ .findComponent(ParameterFormGroup);
+ stickinessSelect = stickinessFormGroup.findComponent(GlFormSelect);
});
it('displays the current percentage value', () => {
@@ -94,7 +94,7 @@ describe('feature_flags/components/strategies/flexible_rollout.vue', () => {
it('shows errors', () => {
const formGroup = wrapper
.find('[data-testid="strategy-flexible-rollout-percentage"]')
- .find(ParameterFormGroup);
+ .findComponent(ParameterFormGroup);
expect(formGroup.attributes('state')).toBeUndefined();
});
@@ -108,7 +108,7 @@ describe('feature_flags/components/strategies/flexible_rollout.vue', () => {
it('shows errors', () => {
const formGroup = wrapper
.find('[data-testid="strategy-flexible-rollout-percentage"]')
- .find(ParameterFormGroup);
+ .findComponent(ParameterFormGroup);
expect(formGroup.attributes('state')).toBeUndefined();
});
diff --git a/spec/frontend/feature_flags/components/strategies/gitlab_user_list_spec.js b/spec/frontend/feature_flags/components/strategies/gitlab_user_list_spec.js
index 3b69194494f..96b9434f3ec 100644
--- a/spec/frontend/feature_flags/components/strategies/gitlab_user_list_spec.js
+++ b/spec/frontend/feature_flags/components/strategies/gitlab_user_list_spec.js
@@ -24,10 +24,10 @@ describe('~/feature_flags/components/strategies/gitlab_user_list.vue', () => {
propsData: { ...DEFAULT_PROPS, ...props },
});
- const findDropdown = () => wrapper.find(GlDropdown);
+ const findDropdown = () => wrapper.findComponent(GlDropdown);
describe('with user lists', () => {
- const findDropdownItem = () => wrapper.find(GlDropdownItem);
+ const findDropdownItem = () => wrapper.findComponent(GlDropdownItem);
beforeEach(() => {
Api.searchFeatureFlagUserLists.mockResolvedValue({ data: [userList] });
@@ -69,10 +69,10 @@ describe('~/feature_flags/components/strategies/gitlab_user_list.vue', () => {
r = resolve;
}),
);
- const searchWrapper = wrapper.find(GlSearchBoxByType);
+ const searchWrapper = wrapper.findComponent(GlSearchBoxByType);
searchWrapper.vm.$emit('input', 'new');
await nextTick();
- const loadingIcon = wrapper.find(GlLoadingIcon);
+ const loadingIcon = wrapper.findComponent(GlLoadingIcon);
expect(loadingIcon.exists()).toBe(true);
expect(Api.searchFeatureFlagUserLists).toHaveBeenCalledWith('1', 'new');
diff --git a/spec/frontend/feature_flags/components/strategies/parameter_form_group_spec.js b/spec/frontend/feature_flags/components/strategies/parameter_form_group_spec.js
index 33696064d55..23ad0d3a08d 100644
--- a/spec/frontend/feature_flags/components/strategies/parameter_form_group_spec.js
+++ b/spec/frontend/feature_flags/components/strategies/parameter_form_group_spec.js
@@ -20,7 +20,7 @@ describe('~/feature_flags/strategies/parameter_form_group.vue', () => {
},
});
- formGroup = wrapper.find(GlFormGroup);
+ formGroup = wrapper.findComponent(GlFormGroup);
slot = wrapper.find('[data-testid="slot"]');
});
diff --git a/spec/frontend/feature_flags/components/strategies/percent_rollout_spec.js b/spec/frontend/feature_flags/components/strategies/percent_rollout_spec.js
index 180697e93e4..cb422a018f9 100644
--- a/spec/frontend/feature_flags/components/strategies/percent_rollout_spec.js
+++ b/spec/frontend/feature_flags/components/strategies/percent_rollout_spec.js
@@ -30,8 +30,8 @@ describe('~/feature_flags/components/strategies/percent_rollout.vue', () => {
beforeEach(() => {
wrapper = factory();
- input = wrapper.find(GlFormInput);
- formGroup = wrapper.find(ParameterFormGroup);
+ input = wrapper.findComponent(GlFormInput);
+ formGroup = wrapper.findComponent(ParameterFormGroup);
});
it('displays the current value', () => {
@@ -55,8 +55,8 @@ describe('~/feature_flags/components/strategies/percent_rollout.vue', () => {
beforeEach(() => {
wrapper = factory({ strategy: { parameters: { percentage: '101' } } });
- input = wrapper.find(GlFormInput);
- formGroup = wrapper.find(ParameterFormGroup);
+ input = wrapper.findComponent(GlFormInput);
+ formGroup = wrapper.findComponent(ParameterFormGroup);
});
it('shows errors', () => {
@@ -68,8 +68,8 @@ describe('~/feature_flags/components/strategies/percent_rollout.vue', () => {
beforeEach(() => {
wrapper = factory({ strategy: { parameters: { percentage: '3.14' } } });
- input = wrapper.find(GlFormInput);
- formGroup = wrapper.find(ParameterFormGroup);
+ input = wrapper.findComponent(GlFormInput);
+ formGroup = wrapper.findComponent(ParameterFormGroup);
});
it('shows errors', () => {
diff --git a/spec/frontend/feature_flags/components/strategies/users_with_id_spec.js b/spec/frontend/feature_flags/components/strategies/users_with_id_spec.js
index 745fbca00fe..0a72714c22a 100644
--- a/spec/frontend/feature_flags/components/strategies/users_with_id_spec.js
+++ b/spec/frontend/feature_flags/components/strategies/users_with_id_spec.js
@@ -15,7 +15,7 @@ describe('~/feature_flags/components/users_with_id.vue', () => {
beforeEach(() => {
wrapper = factory();
- textarea = wrapper.find(GlFormTextarea);
+ textarea = wrapper.findComponent(GlFormTextarea);
});
afterEach(() => {
diff --git a/spec/frontend/feature_flags/components/strategy_parameters_spec.js b/spec/frontend/feature_flags/components/strategy_parameters_spec.js
index 979ca255b08..d0f1f7d0e2a 100644
--- a/spec/frontend/feature_flags/components/strategy_parameters_spec.js
+++ b/spec/frontend/feature_flags/components/strategy_parameters_spec.js
@@ -51,11 +51,11 @@ describe('~/feature_flags/components/strategy_parameters.vue', () => {
});
it('should show the correct component', () => {
- expect(wrapper.find(component).exists()).toBe(true);
+ expect(wrapper.findComponent(component).exists()).toBe(true);
});
it('should emit changes from the lower component', () => {
- const strategyParameterWrapper = wrapper.find(component);
+ const strategyParameterWrapper = wrapper.findComponent(component);
strategyParameterWrapper.vm.$emit('change', { parameters: { foo: 'bar' } });
@@ -77,7 +77,7 @@ describe('~/feature_flags/components/strategy_parameters.vue', () => {
strategy,
});
- expect(wrapper.find(UsersWithId).props('strategy')).toEqual(strategy);
+ expect(wrapper.findComponent(UsersWithId).props('strategy')).toEqual(strategy);
});
});
});
diff --git a/spec/frontend/feature_flags/components/strategy_spec.js b/spec/frontend/feature_flags/components/strategy_spec.js
index aee3873721c..84d4180fe63 100644
--- a/spec/frontend/feature_flags/components/strategy_spec.js
+++ b/spec/frontend/feature_flags/components/strategy_spec.js
@@ -32,8 +32,8 @@ Vue.use(Vuex);
describe('Feature flags strategy', () => {
let wrapper;
- const findStrategyParameters = () => wrapper.find(StrategyParameters);
- const findDocsLinks = () => wrapper.findAll(GlLink);
+ const findStrategyParameters = () => wrapper.findComponent(StrategyParameters);
+ const findDocsLinks = () => wrapper.findAllComponents(GlLink);
const factory = (
opts = {
@@ -93,7 +93,7 @@ describe('Feature flags strategy', () => {
});
it('should set the select to match the strategy name', () => {
- expect(wrapper.find(GlFormSelect).element.value).toBe(name);
+ expect(wrapper.findComponent(GlFormSelect).element.value).toBe(name);
});
it('should emit a change if the parameters component does', () => {
@@ -118,7 +118,7 @@ describe('Feature flags strategy', () => {
});
it('shows an alert asking users to consider using flexibleRollout instead', () => {
- expect(wrapper.find(GlAlert).text()).toContain(
+ expect(wrapper.findComponent(GlAlert).text()).toContain(
'Consider using the more flexible "Percent rollout" strategy instead.',
);
});
@@ -139,10 +139,10 @@ describe('Feature flags strategy', () => {
});
it('should revert to all-environments scope when last scope is removed', async () => {
- const token = wrapper.find(GlToken);
+ const token = wrapper.findComponent(GlToken);
token.vm.$emit('close');
await nextTick();
- expect(wrapper.findAll(GlToken)).toHaveLength(0);
+ expect(wrapper.findAllComponents(GlToken)).toHaveLength(0);
expect(last(wrapper.emitted('change'))).toEqual([
{
name: ROLLOUT_STRATEGY_PERCENT_ROLLOUT,
@@ -167,7 +167,7 @@ describe('Feature flags strategy', () => {
});
it('should change the parameters if a different strategy is chosen', async () => {
- const select = wrapper.find(GlFormSelect);
+ const select = wrapper.findComponent(GlFormSelect);
select.setValue(ROLLOUT_STRATEGY_ALL_USERS);
await nextTick();
expect(last(wrapper.emitted('change'))).toEqual([
@@ -180,26 +180,26 @@ describe('Feature flags strategy', () => {
});
it('should display selected scopes', async () => {
- const dropdown = wrapper.find(NewEnvironmentsDropdown);
+ const dropdown = wrapper.findComponent(NewEnvironmentsDropdown);
dropdown.vm.$emit('add', 'production');
await nextTick();
- expect(wrapper.findAll(GlToken)).toHaveLength(1);
- expect(wrapper.find(GlToken).text()).toBe('production');
+ expect(wrapper.findAllComponents(GlToken)).toHaveLength(1);
+ expect(wrapper.findComponent(GlToken).text()).toBe('production');
});
it('should display all selected scopes', async () => {
- const dropdown = wrapper.find(NewEnvironmentsDropdown);
+ const dropdown = wrapper.findComponent(NewEnvironmentsDropdown);
dropdown.vm.$emit('add', 'production');
dropdown.vm.$emit('add', 'staging');
await nextTick();
- const tokens = wrapper.findAll(GlToken);
+ const tokens = wrapper.findAllComponents(GlToken);
expect(tokens).toHaveLength(2);
expect(tokens.at(0).text()).toBe('production');
expect(tokens.at(1).text()).toBe('staging');
});
it('should emit selected scopes', async () => {
- const dropdown = wrapper.find(NewEnvironmentsDropdown);
+ const dropdown = wrapper.findComponent(NewEnvironmentsDropdown);
dropdown.vm.$emit('add', 'production');
await nextTick();
expect(last(wrapper.emitted('change'))).toEqual([
@@ -215,7 +215,7 @@ describe('Feature flags strategy', () => {
});
it('should emit a delete if the delete button is clicked', () => {
- wrapper.find(GlButton).vm.$emit('click');
+ wrapper.findComponent(GlButton).vm.$emit('click');
expect(wrapper.emitted('delete')).toEqual([[]]);
});
});
@@ -232,26 +232,26 @@ describe('Feature flags strategy', () => {
});
it('should display selected scopes', async () => {
- const dropdown = wrapper.find(NewEnvironmentsDropdown);
+ const dropdown = wrapper.findComponent(NewEnvironmentsDropdown);
dropdown.vm.$emit('add', 'production');
await nextTick();
- expect(wrapper.findAll(GlToken)).toHaveLength(1);
- expect(wrapper.find(GlToken).text()).toBe('production');
+ expect(wrapper.findAllComponents(GlToken)).toHaveLength(1);
+ expect(wrapper.findComponent(GlToken).text()).toBe('production');
});
it('should display all selected scopes', async () => {
- const dropdown = wrapper.find(NewEnvironmentsDropdown);
+ const dropdown = wrapper.findComponent(NewEnvironmentsDropdown);
dropdown.vm.$emit('add', 'production');
dropdown.vm.$emit('add', 'staging');
await nextTick();
- const tokens = wrapper.findAll(GlToken);
+ const tokens = wrapper.findAllComponents(GlToken);
expect(tokens).toHaveLength(2);
expect(tokens.at(0).text()).toBe('production');
expect(tokens.at(1).text()).toBe('staging');
});
it('should emit selected scopes', async () => {
- const dropdown = wrapper.find(NewEnvironmentsDropdown);
+ const dropdown = wrapper.findComponent(NewEnvironmentsDropdown);
dropdown.vm.$emit('add', 'production');
await nextTick();
expect(last(wrapper.emitted('change'))).toEqual([
diff --git a/spec/frontend/fixtures/integrations.rb b/spec/frontend/fixtures/integrations.rb
index 1bafb0bfe78..45d1c400f5d 100644
--- a/spec/frontend/fixtures/integrations.rb
+++ b/spec/frontend/fixtures/integrations.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe Projects::Settings::IntegrationsController, '(JavaScript fixtures)', type: :controller do
include JavaScriptFixturesHelpers
- let(:namespace) { create(:namespace, name: 'frontend-fixtures' )}
+ let(:namespace) { create(:namespace, name: 'frontend-fixtures' ) }
let(:project) { create(:project_empty_repo, namespace: namespace, path: 'integrations-project') }
let!(:service) { create(:custom_issue_tracker_integration, project: project) }
let(:user) { project.first_owner }
diff --git a/spec/frontend/fixtures/issues.rb b/spec/frontend/fixtures/issues.rb
index 8bedb802242..cde796497d4 100644
--- a/spec/frontend/fixtures/issues.rb
+++ b/spec/frontend/fixtures/issues.rb
@@ -106,3 +106,43 @@ RSpec.describe API::Issues, '(JavaScript fixtures)', type: :request do
expect(response).to be_successful
end
end
+
+RSpec.describe GraphQL::Query, type: :request do
+ include ApiHelpers
+ include GraphqlHelpers
+ include JavaScriptFixturesHelpers
+
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project) }
+ let_it_be(:issue_type) { 'issue' }
+
+ before_all do
+ project.add_reporter(user)
+ end
+
+ issue_popover_query_path = 'issuable/popover/queries/issue.query.graphql'
+
+ it "graphql/#{issue_popover_query_path}.json" do
+ query = get_graphql_query_as_string(issue_popover_query_path, ee: Gitlab.ee?)
+
+ issue = create(
+ :issue,
+ project: project,
+ confidential: true,
+ created_at: Time.parse('2020-07-01T04:08:01Z'),
+ due_date: Date.new(2020, 7, 5),
+ milestone: create(
+ :milestone,
+ project: project,
+ title: '15.2',
+ start_date: Date.new(2020, 7, 1),
+ due_date: Date.new(2020, 7, 30)
+ ),
+ issue_type: issue_type
+ )
+
+ post_graphql(query, current_user: user, variables: { projectPath: project.full_path, iid: issue.iid.to_s })
+
+ expect_graphql_errors_to_be_empty
+ end
+end
diff --git a/spec/frontend/fixtures/namespaces.rb b/spec/frontend/fixtures/namespaces.rb
new file mode 100644
index 00000000000..b11f661fe09
--- /dev/null
+++ b/spec/frontend/fixtures/namespaces.rb
@@ -0,0 +1,46 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Jobs (JavaScript fixtures)' do
+ include ApiHelpers
+ include JavaScriptFixturesHelpers
+ include GraphqlHelpers
+
+ describe GraphQL::Query, type: :request do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:groups) { create_list(:group, 4) }
+
+ before_all do
+ groups.each { |group| group.add_owner(user) }
+ end
+
+ query_name = 'search_namespaces_where_user_can_transfer_projects'
+ query_extension = '.query.graphql'
+
+ full_input_path = "projects/settings/graphql/queries/#{query_name}#{query_extension}"
+ base_output_path = "graphql/projects/settings/#{query_name}"
+
+ it "#{base_output_path}_page_1#{query_extension}.json" do
+ query = get_graphql_query_as_string(full_input_path)
+
+ post_graphql(query, current_user: user, variables: { first: 2 })
+
+ expect_graphql_errors_to_be_empty
+ end
+
+ it "#{base_output_path}_page_2#{query_extension}.json" do
+ query = get_graphql_query_as_string(full_input_path)
+
+ post_graphql(query, current_user: user, variables: { first: 2 })
+
+ post_graphql(
+ query,
+ current_user: user,
+ variables: { first: 2, after: graphql_data_at('currentUser', 'groups', 'pageInfo', 'endCursor') }
+ )
+
+ expect_graphql_errors_to_be_empty
+ end
+ end
+end
diff --git a/spec/frontend/fixtures/prometheus_integration.rb b/spec/frontend/fixtures/prometheus_integration.rb
index 883dbb929a2..250c50bc8bb 100644
--- a/spec/frontend/fixtures/prometheus_integration.rb
+++ b/spec/frontend/fixtures/prometheus_integration.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe Projects::Settings::IntegrationsController, '(JavaScript fixtures)', type: :controller do
include JavaScriptFixturesHelpers
- let(:namespace) { create(:namespace, name: 'frontend-fixtures' )}
+ let(:namespace) { create(:namespace, name: 'frontend-fixtures' ) }
let(:project) { create(:project_empty_repo, namespace: namespace, path: 'integrations-project') }
let!(:integration) { create(:prometheus_integration, project: project) }
let(:user) { project.first_owner }
diff --git a/spec/frontend/fixtures/runner.rb b/spec/frontend/fixtures/runner.rb
index 36281af0219..b523650dda5 100644
--- a/spec/frontend/fixtures/runner.rb
+++ b/spec/frontend/fixtures/runner.rb
@@ -13,11 +13,12 @@ RSpec.describe 'Runner (JavaScript fixtures)' do
let_it_be(:project) { create(:project, :repository, :public) }
let_it_be(:project_2) { create(:project, :repository, :public) }
- let_it_be(:instance_runner) { create(:ci_runner, :instance, version: '1.0.0', revision: '123', description: 'Instance runner', ip_address: '127.0.0.1') }
- let_it_be(:group_runner) { create(:ci_runner, :group, groups: [group], active: false, version: '2.0.0', revision: '456', description: 'Group runner', ip_address: '127.0.0.1') }
- let_it_be(:group_runner_2) { create(:ci_runner, :group, groups: [group], active: false, version: '2.0.0', revision: '456', description: 'Group runner 2', ip_address: '127.0.0.1') }
- let_it_be(:project_runner) { create(:ci_runner, :project, projects: [project, project_2], active: false, version: '2.0.0', revision: '456', description: 'Project runner', ip_address: '127.0.0.1') }
- let_it_be(:build) { create(:ci_build, runner: instance_runner) }
+ let_it_be(:runner) { create(:ci_runner, :instance, description: 'My Runner', version: '1.0.0') }
+ let_it_be(:group_runner) { create(:ci_runner, :group, groups: [group], version: '2.0.0') }
+ let_it_be(:group_runner_2) { create(:ci_runner, :group, groups: [group], version: '2.0.0') }
+ let_it_be(:project_runner) { create(:ci_runner, :project, projects: [project, project_2], version: '2.0.0') }
+
+ let_it_be(:build) { create(:ci_build, runner: runner) }
query_path = 'runner/graphql/'
fixtures_path = 'graphql/runner/'
@@ -27,18 +28,19 @@ RSpec.describe 'Runner (JavaScript fixtures)' do
end
before do
- allow(Gitlab::Ci::RunnerUpgradeCheck.instance)
- .to receive(:check_runner_upgrade_status)
- .and_return({ not_available: nil })
+ allow_next_instance_of(::Gitlab::Ci::RunnerUpgradeCheck) do |instance|
+ allow(instance).to receive(:check_runner_upgrade_suggestion)
+ .and_return([nil, :not_available])
+ end
end
- describe do
+ describe 'as admin', GraphQL::Query do
before do
sign_in(admin)
enable_admin_mode!(admin)
end
- describe GraphQL::Query, type: :request do
+ describe 'all_runners.query.graphql', type: :request do
all_runners_query = 'list/all_runners.query.graphql'
let_it_be(:query) do
@@ -58,7 +60,7 @@ RSpec.describe 'Runner (JavaScript fixtures)' do
end
end
- describe GraphQL::Query, type: :request do
+ describe 'all_runners_count.query.graphql', type: :request do
all_runners_count_query = 'list/all_runners_count.query.graphql'
let_it_be(:query) do
@@ -72,7 +74,7 @@ RSpec.describe 'Runner (JavaScript fixtures)' do
end
end
- describe GraphQL::Query, type: :request do
+ describe 'runner.query.graphql', type: :request do
runner_query = 'show/runner.query.graphql'
let_it_be(:query) do
@@ -81,7 +83,7 @@ RSpec.describe 'Runner (JavaScript fixtures)' do
it "#{fixtures_path}#{runner_query}.json" do
post_graphql(query, current_user: admin, variables: {
- id: instance_runner.to_global_id.to_s
+ id: runner.to_global_id.to_s
})
expect_graphql_errors_to_be_empty
@@ -96,7 +98,7 @@ RSpec.describe 'Runner (JavaScript fixtures)' do
end
end
- describe GraphQL::Query, type: :request do
+ describe 'runner_projects.query.graphql', type: :request do
runner_projects_query = 'show/runner_projects.query.graphql'
let_it_be(:query) do
@@ -112,7 +114,7 @@ RSpec.describe 'Runner (JavaScript fixtures)' do
end
end
- describe GraphQL::Query, type: :request do
+ describe 'runner_jobs.query.graphql', type: :request do
runner_jobs_query = 'show/runner_jobs.query.graphql'
let_it_be(:query) do
@@ -121,14 +123,14 @@ RSpec.describe 'Runner (JavaScript fixtures)' do
it "#{fixtures_path}#{runner_jobs_query}.json" do
post_graphql(query, current_user: admin, variables: {
- id: instance_runner.to_global_id.to_s
+ id: runner.to_global_id.to_s
})
expect_graphql_errors_to_be_empty
end
end
- describe GraphQL::Query, type: :request do
+ describe 'runner_form.query.graphql', type: :request do
runner_jobs_query = 'edit/runner_form.query.graphql'
let_it_be(:query) do
@@ -137,7 +139,7 @@ RSpec.describe 'Runner (JavaScript fixtures)' do
it "#{fixtures_path}#{runner_jobs_query}.json" do
post_graphql(query, current_user: admin, variables: {
- id: instance_runner.to_global_id.to_s
+ id: runner.to_global_id.to_s
})
expect_graphql_errors_to_be_empty
@@ -145,14 +147,14 @@ RSpec.describe 'Runner (JavaScript fixtures)' do
end
end
- describe do
+ describe 'as group owner', GraphQL::Query do
let_it_be(:group_owner) { create(:user) }
before do
group.add_owner(group_owner)
end
- describe GraphQL::Query, type: :request do
+ describe 'group_runners.query.graphql', type: :request do
group_runners_query = 'list/group_runners.query.graphql'
let_it_be(:query) do
@@ -177,7 +179,7 @@ RSpec.describe 'Runner (JavaScript fixtures)' do
end
end
- describe GraphQL::Query, type: :request do
+ describe 'group_runners_count.query.graphql', type: :request do
group_runners_count_query = 'list/group_runners_count.query.graphql'
let_it_be(:query) do
diff --git a/spec/frontend/frequent_items/components/app_spec.js b/spec/frontend/frequent_items/components/app_spec.js
index 32c66c0d288..c201bbf4af2 100644
--- a/spec/frontend/frequent_items/components/app_spec.js
+++ b/spec/frontend/frequent_items/components/app_spec.js
@@ -145,11 +145,16 @@ describe('Frequent Items App Component', () => {
expect(findFrequentItemsList().props()).toEqual(
expect.objectContaining({
items: mockSearchedProjects.data.map(
- ({ avatar_url, web_url, name_with_namespace, ...item }) => ({
+ ({
+ avatar_url: avatarUrl,
+ web_url: webUrl,
+ name_with_namespace: namespace,
+ ...item
+ }) => ({
...item,
- avatarUrl: avatar_url,
- webUrl: web_url,
- namespace: name_with_namespace,
+ avatarUrl,
+ webUrl,
+ namespace,
}),
),
namespace: TEST_NAMESPACE,
diff --git a/spec/frontend/gfm_auto_complete/mock_data.js b/spec/frontend/gfm_auto_complete/mock_data.js
new file mode 100644
index 00000000000..86795ffd0a5
--- /dev/null
+++ b/spec/frontend/gfm_auto_complete/mock_data.js
@@ -0,0 +1,34 @@
+export const eventlistenersMockDefaultMap = [
+ {
+ key: 'shown',
+ namespace: 'atwho',
+ },
+ {
+ key: 'shown-users',
+ namespace: 'atwho',
+ },
+ {
+ key: 'shown-issues',
+ namespace: 'atwho',
+ },
+ {
+ key: 'shown-milestones',
+ namespace: 'atwho',
+ },
+ {
+ key: 'shown-mergerequests',
+ namespace: 'atwho',
+ },
+ {
+ key: 'shown-labels',
+ namespace: 'atwho',
+ },
+ {
+ key: 'shown-snippets',
+ namespace: 'atwho',
+ },
+ {
+ key: 'shown-contacts',
+ namespace: 'atwho',
+ },
+];
diff --git a/spec/frontend/gfm_auto_complete_spec.js b/spec/frontend/gfm_auto_complete_spec.js
index 072cf34d0ef..c3dfc4570f9 100644
--- a/spec/frontend/gfm_auto_complete_spec.js
+++ b/spec/frontend/gfm_auto_complete_spec.js
@@ -10,6 +10,7 @@ import { TEST_HOST } from 'helpers/test_constants';
import waitForPromises from 'helpers/wait_for_promises';
import AjaxCache from '~/lib/utils/ajax_cache';
import axios from '~/lib/utils/axios_utils';
+import { eventlistenersMockDefaultMap } from 'ee_else_ce_jest/gfm_auto_complete/mock_data';
describe('GfmAutoComplete', () => {
const fetchDataMock = { fetchData: jest.fn() };
@@ -457,12 +458,12 @@ describe('GfmAutoComplete', () => {
it('should be false with actual array data', () => {
expect(
- GfmAutoComplete.isLoading([{ title: 'Foo' }, { title: 'Bar' }, { title: 'Qux' }]),
+ GfmAutoComplete.isLoading([{ title: 'events' }, { title: 'Bar' }, { title: 'Qux' }]),
).toBe(false);
});
it('should be false with actual data item', () => {
- expect(GfmAutoComplete.isLoading({ title: 'Foo' })).toBe(false);
+ expect(GfmAutoComplete.isLoading({ title: 'events' })).toBe(false);
});
});
@@ -884,4 +885,47 @@ describe('GfmAutoComplete', () => {
).toBe(`<li><small>${escapedPayload} ${escapedPayload}</small> ${escapedPayload}</li>`);
});
});
+
+  describe('autocomplete shown event listeners', () => {
+ let $textarea;
+
+ beforeEach(() => {
+ setHTMLFixture('<textarea></textarea>');
+ $textarea = $('textarea');
+ });
+
+    it('sets correct event listeners when autocomplete features are enabled', () => {
+ const autocomplete = new GfmAutoComplete({});
+ autocomplete.setup($textarea);
+ autocomplete.setupAtWho($textarea);
+ /* eslint-disable-next-line no-underscore-dangle */
+ const events = $._data($textarea[0], 'events');
+ expect(
+ Object.keys(events)
+ .filter((x) => {
+ return x.startsWith('shown');
+ })
+ .map((e) => {
+ return { key: e, namespace: events[e][0].namespace };
+ }),
+ ).toEqual(expect.arrayContaining(eventlistenersMockDefaultMap));
+ });
+
+    it('sets no event listeners when features are disabled', () => {
+ const autocomplete = new GfmAutoComplete({});
+ autocomplete.setup($textarea, {});
+ autocomplete.setupAtWho($textarea);
+ /* eslint-disable-next-line no-underscore-dangle */
+ const events = $._data($textarea[0], 'events');
+ expect(
+ Object.keys(events)
+ .filter((x) => {
+ return x.startsWith('shown');
+ })
+ .map((e) => {
+ return { key: e, namespace: events[e][0].namespace };
+ }),
+ ).toStrictEqual([]);
+ });
+ });
});
diff --git a/spec/frontend/group_settings/components/shared_runners_form_spec.js b/spec/frontend/group_settings/components/shared_runners_form_spec.js
index 70a22c86e62..5282c0ed839 100644
--- a/spec/frontend/group_settings/components/shared_runners_form_spec.js
+++ b/spec/frontend/group_settings/components/shared_runners_form_spec.js
@@ -1,24 +1,24 @@
import { GlAlert } from '@gitlab/ui';
-import MockAxiosAdapter from 'axios-mock-adapter';
import { nextTick } from 'vue';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import waitForPromises from 'helpers/wait_for_promises';
import SharedRunnersForm from '~/group_settings/components/shared_runners_form.vue';
-import axios from '~/lib/utils/axios_utils';
+import { updateGroup } from '~/api/groups_api';
-const UPDATE_PATH = '/test/update';
+jest.mock('~/api/groups_api');
+
+const GROUP_ID = '99';
const RUNNER_ENABLED_VALUE = 'enabled';
const RUNNER_DISABLED_VALUE = 'disabled_and_unoverridable';
const RUNNER_ALLOW_OVERRIDE_VALUE = 'disabled_with_override';
describe('group_settings/components/shared_runners_form', () => {
let wrapper;
- let mock;
const createComponent = (provide = {}) => {
wrapper = shallowMountExtended(SharedRunnersForm, {
provide: {
- updatePath: UPDATE_PATH,
+ groupId: GROUP_ID,
sharedRunnersSetting: RUNNER_ENABLED_VALUE,
parentSharedRunnersSetting: null,
runnerEnabledValue: RUNNER_ENABLED_VALUE,
@@ -36,18 +36,19 @@ describe('group_settings/components/shared_runners_form', () => {
.at(0);
const findSharedRunnersToggle = () => wrapper.findByTestId('shared-runners-toggle');
const findOverrideToggle = () => wrapper.findByTestId('override-runners-toggle');
- const getSharedRunnersSetting = () => JSON.parse(mock.history.put[0].data).shared_runners_setting;
+ const getSharedRunnersSetting = () => {
+ return updateGroup.mock.calls[0][1].shared_runners_setting;
+ };
beforeEach(() => {
- mock = new MockAxiosAdapter(axios);
- mock.onPut(UPDATE_PATH).reply(200);
+ updateGroup.mockResolvedValue({});
});
afterEach(() => {
wrapper.destroy();
wrapper = null;
- mock.restore();
+ updateGroup.mockReset();
});
describe('default state', () => {
@@ -115,7 +116,7 @@ describe('group_settings/components/shared_runners_form', () => {
findSharedRunnersToggle().vm.$emit('change', false);
await waitForPromises();
- expect(mock.history.put.length).toBe(1);
+ expect(updateGroup).toHaveBeenCalledTimes(1);
});
it('is not loading state after completed request', async () => {
@@ -170,12 +171,14 @@ describe('group_settings/components/shared_runners_form', () => {
});
describe.each`
- errorObj | message
+ errorData | message
${{}} | ${'An error occurred while updating configuration. Refresh the page and try again.'}
${{ error: 'Undefined error' }} | ${'Undefined error Refresh the page and try again.'}
- `(`with error $errorObj`, ({ errorObj, message }) => {
+  `(`with error $errorData`, ({ errorData, message }) => {
beforeEach(async () => {
- mock.onPut(UPDATE_PATH).reply(500, errorObj);
+ updateGroup.mockRejectedValue({
+ response: { data: errorData },
+ });
createComponent();
findSharedRunnersToggle().vm.$emit('change', false);
diff --git a/spec/frontend/groups/components/app_spec.js b/spec/frontend/groups/components/app_spec.js
index 9e4666ffc70..a6bbea648d2 100644
--- a/spec/frontend/groups/components/app_spec.js
+++ b/spec/frontend/groups/components/app_spec.js
@@ -85,30 +85,6 @@ describe('AppComponent', () => {
await nextTick();
});
- describe('computed', () => {
- describe('groups', () => {
- it('should return list of groups from store', () => {
- jest.spyOn(vm.store, 'getGroups').mockImplementation(() => {});
-
- const { groups } = vm;
-
- expect(vm.store.getGroups).toHaveBeenCalled();
- expect(groups).not.toBeDefined();
- });
- });
-
- describe('pageInfo', () => {
- it('should return pagination info from store', () => {
- jest.spyOn(vm.store, 'getPaginationInfo').mockImplementation(() => {});
-
- const { pageInfo } = vm;
-
- expect(vm.store.getPaginationInfo).toHaveBeenCalled();
- expect(pageInfo).not.toBeDefined();
- });
- });
- });
-
describe('methods', () => {
describe('fetchGroups', () => {
it('should call `getGroups` with all the params provided', () => {
diff --git a/spec/frontend/groups/components/group_item_spec.js b/spec/frontend/groups/components/group_item_spec.js
index 0bc80df6535..9906f62878f 100644
--- a/spec/frontend/groups/components/group_item_spec.js
+++ b/spec/frontend/groups/components/group_item_spec.js
@@ -6,19 +6,20 @@ import ItemActions from '~/groups/components/item_actions.vue';
import eventHub from '~/groups/event_hub';
import { getGroupItemMicrodata } from '~/groups/store/utils';
import * as urlUtilities from '~/lib/utils/url_utility';
+import { ITEM_TYPE } from '~/groups/constants';
import {
- ITEM_TYPE,
- VISIBILITY_INTERNAL,
- VISIBILITY_PRIVATE,
- VISIBILITY_PUBLIC,
-} from '~/groups/constants';
-import { mountExtended } from 'helpers/vue_test_utils_helper';
+ VISIBILITY_LEVEL_PRIVATE,
+ VISIBILITY_LEVEL_INTERNAL,
+ VISIBILITY_LEVEL_PUBLIC,
+} from '~/visibility_level/constants';
+import { helpPagePath } from '~/helpers/help_page_helper';
+import { mountExtended, extendedWrapper } from 'helpers/vue_test_utils_helper';
import { mockParentGroupItem, mockChildren } from '../mock_data';
const createComponent = (
propsData = { group: mockParentGroupItem, parentGroup: mockChildren[0] },
provide = {
- currentGroupVisibility: VISIBILITY_PRIVATE,
+ currentGroupVisibility: VISIBILITY_LEVEL_PRIVATE,
},
) => {
return mountExtended(GroupItem, {
@@ -289,7 +290,7 @@ describe('GroupItemComponent', () => {
});
describe('visibility warning popover', () => {
- const findPopover = () => wrapper.findComponent(GlPopover);
+ const findPopover = () => extendedWrapper(wrapper.findComponent(GlPopover));
const itDoesNotRenderVisibilityWarningPopover = () => {
it('does not render visibility warning popover', () => {
@@ -319,13 +320,16 @@ describe('GroupItemComponent', () => {
describe('when showing projects', () => {
describe.each`
- itemVisibility | currentGroupVisibility | isPopoverShown
- ${VISIBILITY_PRIVATE} | ${VISIBILITY_PUBLIC} | ${false}
- ${VISIBILITY_INTERNAL} | ${VISIBILITY_PUBLIC} | ${false}
- ${VISIBILITY_PUBLIC} | ${VISIBILITY_PUBLIC} | ${false}
- ${VISIBILITY_PRIVATE} | ${VISIBILITY_PRIVATE} | ${false}
- ${VISIBILITY_INTERNAL} | ${VISIBILITY_PRIVATE} | ${true}
- ${VISIBILITY_PUBLIC} | ${VISIBILITY_PRIVATE} | ${true}
+ itemVisibility | currentGroupVisibility | isPopoverShown
+ ${VISIBILITY_LEVEL_PRIVATE} | ${VISIBILITY_LEVEL_PUBLIC} | ${false}
+ ${VISIBILITY_LEVEL_INTERNAL} | ${VISIBILITY_LEVEL_PUBLIC} | ${false}
+ ${VISIBILITY_LEVEL_PUBLIC} | ${VISIBILITY_LEVEL_PUBLIC} | ${false}
+ ${VISIBILITY_LEVEL_PRIVATE} | ${VISIBILITY_LEVEL_PRIVATE} | ${false}
+ ${VISIBILITY_LEVEL_INTERNAL} | ${VISIBILITY_LEVEL_PRIVATE} | ${true}
+ ${VISIBILITY_LEVEL_PUBLIC} | ${VISIBILITY_LEVEL_PRIVATE} | ${true}
+ ${VISIBILITY_LEVEL_PRIVATE} | ${VISIBILITY_LEVEL_INTERNAL} | ${false}
+ ${VISIBILITY_LEVEL_INTERNAL} | ${VISIBILITY_LEVEL_INTERNAL} | ${false}
+ ${VISIBILITY_LEVEL_PUBLIC} | ${VISIBILITY_LEVEL_INTERNAL} | ${true}
`(
'when item visibility is $itemVisibility and parent group visibility is $currentGroupVisibility',
({ itemVisibility, currentGroupVisibility, isPopoverShown }) => {
@@ -347,8 +351,17 @@ describe('GroupItemComponent', () => {
});
if (isPopoverShown) {
- it('renders visibility warning popover', () => {
- expect(findPopover().exists()).toBe(true);
+ it('renders visibility warning popover with `Learn more` link', () => {
+ const popover = findPopover();
+
+ expect(popover.exists()).toBe(true);
+ expect(
+ popover.findByRole('link', { name: GroupItem.i18n.learnMore }).attributes('href'),
+ ).toBe(
+ helpPagePath('user/project/members/share_project_with_groups', {
+ anchor: 'sharing-projects-with-groups-of-a-higher-restrictive-visibility-level',
+ }),
+ );
});
} else {
itDoesNotRenderVisibilityWarningPopover();
@@ -361,7 +374,7 @@ describe('GroupItemComponent', () => {
wrapper = createComponent({
group: {
...mockParentGroupItem,
- visibility: VISIBILITY_PUBLIC,
+ visibility: VISIBILITY_LEVEL_PUBLIC,
type: ITEM_TYPE.PROJECT,
},
parentGroup: mockChildren[0],
diff --git a/spec/frontend/groups/components/group_name_and_path_spec.js b/spec/frontend/groups/components/group_name_and_path_spec.js
index 9c9bdead6fa..823d2ed286a 100644
--- a/spec/frontend/groups/components/group_name_and_path_spec.js
+++ b/spec/frontend/groups/components/group_name_and_path_spec.js
@@ -1,18 +1,23 @@
-import { nextTick } from 'vue';
+import Vue, { nextTick } from 'vue';
+import VueApollo from 'vue-apollo';
import { merge } from 'lodash';
-import { GlAlert } from '@gitlab/ui';
+import { GlAlert, GlDropdown, GlTruncate, GlDropdownItem } from '@gitlab/ui';
import { mountExtended, extendedWrapper } from 'helpers/vue_test_utils_helper';
import waitForPromises from 'helpers/wait_for_promises';
+import createMockApollo from 'helpers/mock_apollo_helper';
import GroupNameAndPath from '~/groups/components/group_name_and_path.vue';
import { getGroupPathAvailability } from '~/rest_api';
import { createAlert } from '~/flash';
import { helpPagePath } from '~/helpers/help_page_helper';
+import searchGroupsWhereUserCanCreateSubgroups from '~/groups/queries/search_groups_where_user_can_create_subgroups.query.graphql';
jest.mock('~/flash');
jest.mock('~/rest_api', () => ({
getGroupPathAvailability: jest.fn(),
}));
+Vue.use(VueApollo);
+
describe('GroupNameAndPath', () => {
let wrapper;
@@ -20,6 +25,17 @@ describe('GroupNameAndPath', () => {
const mockGroupUrl = 'my-awesome-group';
const mockGroupUrlSuggested = 'my-awesome-group1';
+ const mockQueryResponse = jest.fn().mockResolvedValue({
+ data: {
+ currentUser: {
+ id: '1',
+ groups: {
+ nodes: [{ id: '2', fullPath: '/path2' }],
+ },
+ },
+ },
+ });
+
const defaultProvide = {
basePath: 'http://gitlab.com/',
fields: {
@@ -32,13 +48,20 @@ describe('GroupNameAndPath', () => {
pattern: '[a-zA-Z0-9_\\.][a-zA-Z0-9_\\-\\.]*[a-zA-Z0-9_\\-]|[a-zA-Z0-9_]',
},
parentId: { name: 'group[parent_id]', id: 'group_parent_id', value: '1' },
+ parentFullPath: { name: 'group[parent_full_path]', id: 'group_full_path', value: '/path1' },
groupId: { name: 'group[id]', id: 'group_id', value: '' },
},
+ newSubgroup: false,
mattermostEnabled: false,
};
const createComponent = ({ provide = {} } = {}) => {
- wrapper = mountExtended(GroupNameAndPath, { provide: merge({}, defaultProvide, provide) });
+ wrapper = mountExtended(GroupNameAndPath, {
+ provide: merge({}, defaultProvide, provide),
+ apolloProvider: createMockApollo([
+ [searchGroupsWhereUserCanCreateSubgroups, mockQueryResponse],
+ ]),
+ });
};
const createComponentEditGroup = ({ path = mockGroupUrl } = {}) => {
createComponent({
@@ -46,8 +69,11 @@ describe('GroupNameAndPath', () => {
});
};
- const findGroupNameField = () => wrapper.findByLabelText(GroupNameAndPath.i18n.inputs.name.label);
- const findGroupUrlField = () => wrapper.findByLabelText(GroupNameAndPath.i18n.inputs.path.label);
+ const findGroupNameField = () => wrapper.findByLabelText('Group name');
+ const findGroupUrlField = () => wrapper.findByLabelText('Group URL');
+ const findSubgroupNameField = () => wrapper.findByLabelText('Subgroup name');
+ const findSubgroupSlugField = () => wrapper.findByLabelText('Subgroup slug');
+ const findSelectedGroup = () => wrapper.findComponent(GlTruncate);
const findAlert = () => extendedWrapper(wrapper.findComponent(GlAlert));
const apiMockAvailablePath = () => {
@@ -79,6 +105,41 @@ describe('GroupNameAndPath', () => {
});
});
+ describe('when creating a new subgroup', () => {
+ beforeEach(() => {
+ createComponent({ provide: { newSubgroup: true } });
+ });
+
+ it('updates `Subgroup slug` field as user types', async () => {
+ await findSubgroupNameField().setValue(mockGroupName);
+
+ expect(findSubgroupSlugField().element.value).toBe(mockGroupUrl);
+ });
+
+ describe('when user selects parent group', () => {
+ it('updates `Subgroup URL` dropdown and calls API', async () => {
+ expect(findSelectedGroup().text()).toContain('/path1');
+
+ await findSubgroupNameField().setValue(mockGroupName);
+
+ wrapper.findComponent(GlDropdown).vm.$emit('shown');
+ await wrapper.vm.$apollo.queries.currentUserGroups.refetch();
+ jest.runOnlyPendingTimers();
+ await waitForPromises();
+
+ wrapper.findComponent(GlDropdownItem).vm.$emit('click');
+ await nextTick();
+
+ expect(findSelectedGroup().text()).toContain('/path2');
+ expect(getGroupPathAvailability).toHaveBeenCalled();
+
+ expect(wrapper.findByText(GroupNameAndPath.i18n.inputs.path.validFeedback).exists()).toBe(
+ true,
+ );
+ });
+ });
+ });
+
describe('when editing a group', () => {
it('does not update `Group URL` field and does not call API', async () => {
const groupUrl = 'foo-bar';
@@ -346,9 +407,7 @@ describe('GroupNameAndPath', () => {
it('shows `Group ID` field', () => {
createComponentEditGroup();
- expect(
- wrapper.findByLabelText(GroupNameAndPath.i18n.inputs.groupId.label).element.value,
- ).toBe('1');
+ expect(wrapper.findByLabelText('Group ID').element.value).toBe('1');
});
});
});
diff --git a/spec/frontend/groups/components/groups_spec.js b/spec/frontend/groups/components/groups_spec.js
index 48a2319cf96..6c1eb373b7e 100644
--- a/spec/frontend/groups/components/groups_spec.js
+++ b/spec/frontend/groups/components/groups_spec.js
@@ -6,7 +6,7 @@ import GroupItemComponent from '~/groups/components/group_item.vue';
import PaginationLinks from '~/vue_shared/components/pagination_links.vue';
import GroupsComponent from '~/groups/components/groups.vue';
import eventHub from '~/groups/event_hub';
-import { VISIBILITY_PRIVATE } from '~/groups/constants';
+import { VISIBILITY_LEVEL_PRIVATE } from '~/visibility_level/constants';
import { mockGroups, mockPageInfo } from '../mock_data';
describe('GroupsComponent', () => {
@@ -26,7 +26,7 @@ describe('GroupsComponent', () => {
...propsData,
},
provide: {
- currentGroupVisibility: VISIBILITY_PRIVATE,
+ currentGroupVisibility: VISIBILITY_LEVEL_PRIVATE,
},
});
};
diff --git a/spec/frontend/groups/components/transfer_group_form_spec.js b/spec/frontend/groups/components/transfer_group_form_spec.js
index 6dc760f4f7c..8cfe8ce8e18 100644
--- a/spec/frontend/groups/components/transfer_group_form_spec.js
+++ b/spec/frontend/groups/components/transfer_group_form_spec.js
@@ -82,7 +82,6 @@ describe('Transfer group form', () => {
it('sets the confirm danger properties', () => {
expect(findConfirmDanger().props()).toMatchObject({
- buttonClass: 'qa-transfer-button',
disabled: true,
buttonText: confirmButtonText,
phrase: confirmationPhrase,
diff --git a/spec/frontend/header_search/components/app_spec.js b/spec/frontend/header_search/components/app_spec.js
index d89218f5542..6a138f9a247 100644
--- a/spec/frontend/header_search/components/app_spec.js
+++ b/spec/frontend/header_search/components/app_spec.js
@@ -15,6 +15,10 @@ import {
ICON_GROUP,
ICON_SUBGROUP,
SCOPE_TOKEN_MAX_LENGTH,
+ IS_SEARCHING,
+ IS_NOT_FOCUSED,
+ IS_FOCUSED,
+ SEARCH_SHORTCUTS_MIN_CHARACTERS,
} from '~/header_search/constants';
import DropdownKeyboardNavigation from '~/vue_shared/components/dropdown_keyboard_navigation.vue';
import { ENTER_KEY } from '~/lib/utils/keys';
@@ -170,6 +174,14 @@ describe('HeaderSearchApp', () => {
it(`should render the Dropdown Navigation Component`, () => {
expect(findDropdownKeyboardNavigation().exists()).toBe(true);
});
+
+      it(`should close the dropdown when the escape key is pressed`, async () => {
+ findHeaderSearchInput().vm.$emit('keydown', new KeyboardEvent({ key: 27 }));
+ await nextTick();
+ expect(findHeaderSearchDropdown().exists()).toBe(false);
+        // only one event emitted from findHeaderSearchInput().vm.$emit('click');
+ expect(wrapper.emitted().expandSearchBar.length).toBe(1);
+ });
});
});
@@ -245,6 +257,7 @@ describe('HeaderSearchApp', () => {
searchOptions: () => searchOptions,
},
);
+ findHeaderSearchInput().vm.$emit('click');
});
it(`${hasToken ? 'is' : 'is NOT'} rendered when data set has type "${
@@ -263,47 +276,43 @@ describe('HeaderSearchApp', () => {
});
});
- describe('form wrapper', () => {
+ describe('form', () => {
describe.each`
- searchContext | search | searchOptions
- ${MOCK_SEARCH_CONTEXT_FULL} | ${null} | ${[]}
- ${MOCK_SEARCH_CONTEXT_FULL} | ${MOCK_SEARCH} | ${[]}
- ${MOCK_SEARCH_CONTEXT_FULL} | ${MOCK_SEARCH} | ${MOCK_SCOPED_SEARCH_OPTIONS}
- ${null} | ${MOCK_SEARCH} | ${MOCK_SCOPED_SEARCH_OPTIONS}
- ${null} | ${null} | ${MOCK_SCOPED_SEARCH_OPTIONS}
- ${null} | ${null} | ${[]}
- `('', ({ searchContext, search, searchOptions }) => {
+ searchContext | search | searchOptions | isFocused
+ ${MOCK_SEARCH_CONTEXT_FULL} | ${null} | ${[]} | ${true}
+ ${MOCK_SEARCH_CONTEXT_FULL} | ${MOCK_SEARCH} | ${[]} | ${true}
+ ${MOCK_SEARCH_CONTEXT_FULL} | ${MOCK_SEARCH} | ${MOCK_SCOPED_SEARCH_OPTIONS} | ${true}
+ ${MOCK_SEARCH_CONTEXT_FULL} | ${MOCK_SEARCH} | ${MOCK_SCOPED_SEARCH_OPTIONS} | ${false}
+ ${null} | ${MOCK_SEARCH} | ${MOCK_SCOPED_SEARCH_OPTIONS} | ${true}
+ ${null} | ${null} | ${MOCK_SCOPED_SEARCH_OPTIONS} | ${true}
+ ${null} | ${null} | ${[]} | ${true}
+ `('wrapper', ({ searchContext, search, searchOptions, isFocused }) => {
beforeEach(() => {
window.gon.current_username = MOCK_USERNAME;
-
createComponent({ search, searchContext }, { searchOptions: () => searchOptions });
-
- findHeaderSearchInput().vm.$emit('click');
+ if (isFocused) {
+ findHeaderSearchInput().vm.$emit('click');
+ }
});
- const hasIcon = Boolean(searchContext?.group);
- const isSearching = Boolean(search);
- const isActive = Boolean(searchOptions.length > 0);
+ const isSearching = search?.length > SEARCH_SHORTCUTS_MIN_CHARACTERS;
- it(`${hasIcon ? 'with' : 'without'} search context classes contain "${
- hasIcon ? 'has-icon' : 'has-no-icon'
- }"`, () => {
- const iconClassRegex = hasIcon ? 'has-icon' : 'has-no-icon';
- expect(findHeaderSearchForm().classes()).toContain(iconClassRegex);
+ it(`classes ${isSearching ? 'contain' : 'do not contain'} "${IS_SEARCHING}"`, () => {
+ if (isSearching) {
+ expect(findHeaderSearchForm().classes()).toContain(IS_SEARCHING);
+ return;
+ }
+ if (!isSearching) {
+ expect(findHeaderSearchForm().classes()).not.toContain(IS_SEARCHING);
+ }
});
- it(`${isSearching ? 'with' : 'without'} search string classes contain "${
- isSearching ? 'is-searching' : 'is-not-searching'
+ it(`classes ${isSearching ? 'contain' : 'do not contain'} "${
+ isFocused ? IS_FOCUSED : IS_NOT_FOCUSED
}"`, () => {
- const iconClassRegex = isSearching ? 'is-searching' : 'is-not-searching';
- expect(findHeaderSearchForm().classes()).toContain(iconClassRegex);
- });
-
- it(`${isActive ? 'with' : 'without'} search results classes contain "${
- isActive ? 'is-active' : 'is-not-active'
- }"`, () => {
- const iconClassRegex = isActive ? 'is-active' : 'is-not-active';
- expect(findHeaderSearchForm().classes()).toContain(iconClassRegex);
+ expect(findHeaderSearchForm().classes()).toContain(
+ isFocused ? IS_FOCUSED : IS_NOT_FOCUSED,
+ );
});
});
});
@@ -323,6 +332,7 @@ describe('HeaderSearchApp', () => {
searchOptions: () => searchOptions,
},
);
+ findHeaderSearchInput().vm.$emit('click');
});
it(`icon for data set type "${searchOptions[0]?.html_id}" ${
diff --git a/spec/frontend/helpers/diffs_helper_spec.js b/spec/frontend/helpers/diffs_helper_spec.js
index b223d48bf5c..c1ac7fac3fd 100644
--- a/spec/frontend/helpers/diffs_helper_spec.js
+++ b/spec/frontend/helpers/diffs_helper_spec.js
@@ -14,45 +14,45 @@ describe('diffs helper', () => {
describe('hasInlineLines', () => {
it('is false when the file does not exist', () => {
- expect(diffsHelper.hasInlineLines()).toBeFalsy();
+ expect(diffsHelper.hasInlineLines()).toBe(false);
});
it('is false when the file does not have the highlighted_diff_lines property', () => {
const missingInline = getDiffFile({ highlighted_diff_lines: undefined });
- expect(diffsHelper.hasInlineLines(missingInline)).toBeFalsy();
+ expect(diffsHelper.hasInlineLines(missingInline)).toBe(false);
});
it('is false when the file has zero highlighted_diff_lines', () => {
const emptyInline = getDiffFile({ highlighted_diff_lines: [] });
- expect(diffsHelper.hasInlineLines(emptyInline)).toBeFalsy();
+ expect(diffsHelper.hasInlineLines(emptyInline)).toBe(false);
});
it('is true when the file has at least 1 highlighted_diff_lines', () => {
- expect(diffsHelper.hasInlineLines(getDiffFile())).toBeTruthy();
+ expect(diffsHelper.hasInlineLines(getDiffFile())).toBe(true);
});
});
describe('hasParallelLines', () => {
it('is false when the file does not exist', () => {
- expect(diffsHelper.hasParallelLines()).toBeFalsy();
+ expect(diffsHelper.hasParallelLines()).toBe(false);
});
it('is false when the file does not have the parallel_diff_lines property', () => {
const missingInline = getDiffFile({ parallel_diff_lines: undefined });
- expect(diffsHelper.hasParallelLines(missingInline)).toBeFalsy();
+ expect(diffsHelper.hasParallelLines(missingInline)).toBe(false);
});
it('is false when the file has zero parallel_diff_lines', () => {
const emptyInline = getDiffFile({ parallel_diff_lines: [] });
- expect(diffsHelper.hasParallelLines(emptyInline)).toBeFalsy();
+ expect(diffsHelper.hasParallelLines(emptyInline)).toBe(false);
});
it('is true when the file has at least 1 parallel_diff_lines', () => {
- expect(diffsHelper.hasParallelLines(getDiffFile())).toBeTruthy();
+ expect(diffsHelper.hasParallelLines(getDiffFile())).toBe(true);
});
});
@@ -61,16 +61,16 @@ describe('diffs helper', () => {
const noParallelLines = getDiffFile({ parallel_diff_lines: undefined });
const emptyParallelLines = getDiffFile({ parallel_diff_lines: [] });
- expect(diffsHelper.isSingleViewStyle(noParallelLines)).toBeTruthy();
- expect(diffsHelper.isSingleViewStyle(emptyParallelLines)).toBeTruthy();
+ expect(diffsHelper.isSingleViewStyle(noParallelLines)).toBe(true);
+ expect(diffsHelper.isSingleViewStyle(emptyParallelLines)).toBe(true);
});
it('is true when the file has at least 1 parallel line but no inline lines for any reason', () => {
const noInlineLines = getDiffFile({ highlighted_diff_lines: undefined });
const emptyInlineLines = getDiffFile({ highlighted_diff_lines: [] });
- expect(diffsHelper.isSingleViewStyle(noInlineLines)).toBeTruthy();
- expect(diffsHelper.isSingleViewStyle(emptyInlineLines)).toBeTruthy();
+ expect(diffsHelper.isSingleViewStyle(noInlineLines)).toBe(true);
+ expect(diffsHelper.isSingleViewStyle(emptyInlineLines)).toBe(true);
});
it('is true when the file does not have any inline lines or parallel lines for any reason', () => {
@@ -83,13 +83,13 @@ describe('diffs helper', () => {
parallel_diff_lines: [],
});
- expect(diffsHelper.isSingleViewStyle(noLines)).toBeTruthy();
- expect(diffsHelper.isSingleViewStyle(emptyLines)).toBeTruthy();
- expect(diffsHelper.isSingleViewStyle()).toBeTruthy();
+ expect(diffsHelper.isSingleViewStyle(noLines)).toBe(true);
+ expect(diffsHelper.isSingleViewStyle(emptyLines)).toBe(true);
+ expect(diffsHelper.isSingleViewStyle()).toBe(true);
});
it('is false when the file has both inline and parallel lines', () => {
- expect(diffsHelper.isSingleViewStyle(getDiffFile())).toBeFalsy();
+ expect(diffsHelper.isSingleViewStyle(getDiffFile())).toBe(false);
});
});
diff --git a/spec/frontend/ide/components/activity_bar_spec.js b/spec/frontend/ide/components/activity_bar_spec.js
index 39fe2c7e723..a97e883a8bf 100644
--- a/spec/frontend/ide/components/activity_bar_spec.js
+++ b/spec/frontend/ide/components/activity_bar_spec.js
@@ -1,86 +1,82 @@
-import Vue, { nextTick } from 'vue';
-import { createComponentWithStore } from 'helpers/vue_mount_component_helper';
+import { GlBadge } from '@gitlab/ui';
+import { shallowMount } from '@vue/test-utils';
import ActivityBar from '~/ide/components/activity_bar.vue';
import { leftSidebarViews } from '~/ide/constants';
import { createStore } from '~/ide/stores';
-describe('IDE activity bar', () => {
- const Component = Vue.extend(ActivityBar);
- let vm;
+describe('IDE ActivityBar component', () => {
+ let wrapper;
let store;
- const findChangesBadge = () => vm.$el.querySelector('.badge');
+ const findChangesBadge = () => wrapper.findComponent(GlBadge);
- beforeEach(() => {
+ const mountComponent = (state) => {
store = createStore();
-
- Vue.set(store.state.projects, 'abcproject', {
- web_url: 'testing',
+ store.replaceState({
+ ...store.state,
+ projects: { abcproject: { web_url: 'testing' } },
+ currentProjectId: 'abcproject',
+ ...state,
});
- Vue.set(store.state, 'currentProjectId', 'abcproject');
- vm = createComponentWithStore(Component, store);
- });
+ wrapper = shallowMount(ActivityBar, { store });
+ };
afterEach(() => {
- vm.$destroy();
+ wrapper.destroy();
});
describe('updateActivityBarView', () => {
beforeEach(() => {
- jest.spyOn(vm, 'updateActivityBarView').mockImplementation(() => {});
-
- vm.$mount();
+ mountComponent();
+ jest.spyOn(wrapper.vm, 'updateActivityBarView').mockImplementation(() => {});
});
it('calls updateActivityBarView with edit value on click', () => {
- vm.$el.querySelector('.js-ide-edit-mode').click();
+ wrapper.find('.js-ide-edit-mode').trigger('click');
- expect(vm.updateActivityBarView).toHaveBeenCalledWith(leftSidebarViews.edit.name);
+ expect(wrapper.vm.updateActivityBarView).toHaveBeenCalledWith(leftSidebarViews.edit.name);
});
it('calls updateActivityBarView with commit value on click', () => {
- vm.$el.querySelector('.js-ide-commit-mode').click();
+ wrapper.find('.js-ide-commit-mode').trigger('click');
- expect(vm.updateActivityBarView).toHaveBeenCalledWith(leftSidebarViews.commit.name);
+ expect(wrapper.vm.updateActivityBarView).toHaveBeenCalledWith(leftSidebarViews.commit.name);
});
it('calls updateActivityBarView with review value on click', () => {
- vm.$el.querySelector('.js-ide-review-mode').click();
+ wrapper.find('.js-ide-review-mode').trigger('click');
- expect(vm.updateActivityBarView).toHaveBeenCalledWith(leftSidebarViews.review.name);
+ expect(wrapper.vm.updateActivityBarView).toHaveBeenCalledWith(leftSidebarViews.review.name);
});
});
describe('active item', () => {
- beforeEach(() => {
- vm.$mount();
- });
-
it('sets edit item active', () => {
- expect(vm.$el.querySelector('.js-ide-edit-mode').classList).toContain('active');
+ mountComponent();
+
+ expect(wrapper.find('.js-ide-edit-mode').classes()).toContain('active');
});
- it('sets commit item active', async () => {
- vm.$store.state.currentActivityView = leftSidebarViews.commit.name;
+ it('sets commit item active', () => {
+ mountComponent({ currentActivityView: leftSidebarViews.commit.name });
- await nextTick();
- expect(vm.$el.querySelector('.js-ide-commit-mode').classList).toContain('active');
+ expect(wrapper.find('.js-ide-commit-mode').classes()).toContain('active');
});
});
describe('changes badge', () => {
it('is rendered when files are staged', () => {
- store.state.stagedFiles = [{ path: '/path/to/file' }];
- vm.$mount();
+ mountComponent({ stagedFiles: [{ path: '/path/to/file' }] });
- expect(findChangesBadge()).toBeTruthy();
- expect(findChangesBadge().textContent.trim()).toBe('1');
+ expect(findChangesBadge().exists()).toBe(true);
+ expect(findChangesBadge().text()).toBe('1');
});
it('is not rendered when no changes are present', () => {
- vm.$mount();
- expect(findChangesBadge()).toBeFalsy();
+ mountComponent();
+
+ expect(findChangesBadge().exists()).toBe(false);
});
});
});
diff --git a/spec/frontend/ide/components/branches/item_spec.js b/spec/frontend/ide/components/branches/item_spec.js
index 271d0600e16..3dbd1210916 100644
--- a/spec/frontend/ide/components/branches/item_spec.js
+++ b/spec/frontend/ide/components/branches/item_spec.js
@@ -44,8 +44,8 @@ describe('IDE branch item', () => {
});
it('renders branch name and timeago', () => {
expect(wrapper.text()).toContain(TEST_BRANCH.name);
- expect(wrapper.find(Timeago).props('time')).toBe(TEST_BRANCH.committedDate);
- expect(wrapper.find(GlIcon).exists()).toBe(false);
+ expect(wrapper.findComponent(Timeago).props('time')).toBe(TEST_BRANCH.committedDate);
+ expect(wrapper.findComponent(GlIcon).exists()).toBe(false);
});
it('renders link to branch', () => {
@@ -60,6 +60,6 @@ describe('IDE branch item', () => {
it('renders icon if is not active', () => {
createComponent({ isActive: true });
- expect(wrapper.find(GlIcon).exists()).toBe(true);
+ expect(wrapper.findComponent(GlIcon).exists()).toBe(true);
});
});
diff --git a/spec/frontend/ide/components/branches/search_list_spec.js b/spec/frontend/ide/components/branches/search_list_spec.js
index b6e3274153a..bbde45d700f 100644
--- a/spec/frontend/ide/components/branches/search_list_spec.js
+++ b/spec/frontend/ide/components/branches/search_list_spec.js
@@ -47,7 +47,7 @@ describe('IDE branches search list', () => {
it('renders loading icon when `isLoading` is true', () => {
createComponent({ isLoading: true });
- expect(wrapper.find(GlLoadingIcon).exists()).toBe(true);
+ expect(wrapper.findComponent(GlLoadingIcon).exists()).toBe(true);
});
it('renders branches not found when search is not empty and branches list is empty', async () => {
@@ -61,7 +61,7 @@ describe('IDE branches search list', () => {
describe('with branches', () => {
it('renders list', () => {
createComponent({ branches });
- const items = wrapper.findAll(Item);
+ const items = wrapper.findAllComponents(Item);
expect(items.length).toBe(branches.length);
});
@@ -69,7 +69,7 @@ describe('IDE branches search list', () => {
it('renders check next to active branch', () => {
const activeBranch = 'regular';
createComponent({ branches }, activeBranch);
- const items = wrapper.findAll(Item).filter((w) => w.props('isActive'));
+ const items = wrapper.findAllComponents(Item).filter((w) => w.props('isActive'));
expect(items.length).toBe(1);
expect(items.at(0).props('item').name).toBe(activeBranch);
diff --git a/spec/frontend/ide/components/commit_sidebar/editor_header_spec.js b/spec/frontend/ide/components/commit_sidebar/editor_header_spec.js
index d77e8e3d04c..f6d5833edee 100644
--- a/spec/frontend/ide/components/commit_sidebar/editor_header_spec.js
+++ b/spec/frontend/ide/components/commit_sidebar/editor_header_spec.js
@@ -26,8 +26,8 @@ describe('IDE commit editor header', () => {
});
};
- const findDiscardModal = () => wrapper.find({ ref: 'discardModal' });
- const findDiscardButton = () => wrapper.find({ ref: 'discardButton' });
+ const findDiscardModal = () => wrapper.findComponent({ ref: 'discardModal' });
+ const findDiscardButton = () => wrapper.findComponent({ ref: 'discardButton' });
beforeEach(() => {
store = createStore();
diff --git a/spec/frontend/ide/components/commit_sidebar/form_spec.js b/spec/frontend/ide/components/commit_sidebar/form_spec.js
index 28f62a9775a..a8ee81afa0b 100644
--- a/spec/frontend/ide/components/commit_sidebar/form_spec.js
+++ b/spec/frontend/ide/components/commit_sidebar/form_spec.js
@@ -58,7 +58,7 @@ describe('IDE commit form', () => {
});
const findForm = () => wrapper.find('form');
const submitForm = () => findForm().trigger('submit');
- const findCommitMessageInput = () => wrapper.find(CommitMessageField);
+ const findCommitMessageInput = () => wrapper.findComponent(CommitMessageField);
const setCommitMessageInput = (val) => findCommitMessageInput().vm.$emit('input', val);
const findDiscardDraftButton = () => wrapper.find('[data-testid="discard-draft"]');
@@ -302,7 +302,7 @@ describe('IDE commit form', () => {
${() => createCodeownersCommitError('test message')} | ${{ actionPrimary: { text: 'Create new branch' } }}
${createUnexpectedCommitError} | ${{ actionPrimary: null }}
`('opens error modal if commitError with $error', async ({ createError, props }) => {
- const modal = wrapper.find(GlModal);
+ const modal = wrapper.findComponent(GlModal);
modal.vm.show = jest.fn();
const error = createError();
@@ -343,7 +343,7 @@ describe('IDE commit form', () => {
await nextTick();
- wrapper.find(GlModal).vm.$emit('ok');
+ wrapper.findComponent(GlModal).vm.$emit('ok');
await waitForPromises();
diff --git a/spec/frontend/ide/components/error_message_spec.js b/spec/frontend/ide/components/error_message_spec.js
index 17568158131..204d39de741 100644
--- a/spec/frontend/ide/components/error_message_spec.js
+++ b/spec/frontend/ide/components/error_message_spec.js
@@ -105,7 +105,7 @@ describe('IDE error message component', () => {
findActionButton().trigger('click');
await nextTick();
- expect(wrapper.find(GlLoadingIcon).isVisible()).toBe(true);
+ expect(wrapper.findComponent(GlLoadingIcon).isVisible()).toBe(true);
resolveAction();
});
@@ -113,7 +113,7 @@ describe('IDE error message component', () => {
findActionButton().trigger('click');
await actionMock();
await nextTick();
- expect(wrapper.find(GlLoadingIcon).isVisible()).toBe(false);
+ expect(wrapper.findComponent(GlLoadingIcon).isVisible()).toBe(false);
});
});
});
diff --git a/spec/frontend/ide/components/file_templates/dropdown_spec.js b/spec/frontend/ide/components/file_templates/dropdown_spec.js
index e54b322b9db..ee90d87357c 100644
--- a/spec/frontend/ide/components/file_templates/dropdown_spec.js
+++ b/spec/frontend/ide/components/file_templates/dropdown_spec.js
@@ -94,7 +94,7 @@ describe('IDE file templates dropdown component', () => {
it('shows loader when isLoading is true', () => {
createComponent({ props: defaultAsyncProps, state: { isLoading: true } });
- expect(wrapper.find(GlLoadingIcon).exists()).toBe(true);
+ expect(wrapper.findComponent(GlLoadingIcon).exists()).toBe(true);
});
it('renders templates', () => {
diff --git a/spec/frontend/ide/components/ide_file_row_spec.js b/spec/frontend/ide/components/ide_file_row_spec.js
index baf3d7cca9d..aa66224fa19 100644
--- a/spec/frontend/ide/components/ide_file_row_spec.js
+++ b/spec/frontend/ide/components/ide_file_row_spec.js
@@ -39,8 +39,8 @@ describe('Ide File Row component', () => {
wrapper = null;
});
- const findFileRowExtra = () => wrapper.find(FileRowExtra);
- const findFileRow = () => wrapper.find(FileRow);
+ const findFileRowExtra = () => wrapper.findComponent(FileRowExtra);
+ const findFileRow = () => wrapper.findComponent(FileRow);
const hasDropdownOpen = () => findFileRowExtra().props('dropdownOpen');
it('fileRow component has listeners', async () => {
diff --git a/spec/frontend/ide/components/ide_project_header_spec.js b/spec/frontend/ide/components/ide_project_header_spec.js
index fc39651c661..d0636352a3f 100644
--- a/spec/frontend/ide/components/ide_project_header_spec.js
+++ b/spec/frontend/ide/components/ide_project_header_spec.js
@@ -3,6 +3,7 @@ import IDEProjectHeader from '~/ide/components/ide_project_header.vue';
import ProjectAvatar from '~/vue_shared/components/project_avatar.vue';
const mockProject = {
+ id: 1,
name: 'test proj',
avatar_url: 'https://gitlab.com',
path_with_namespace: 'path/with-namespace',
@@ -30,6 +31,7 @@ describe('IDE project header', () => {
it('renders ProjectAvatar with correct props', () => {
expect(findProjectAvatar().props()).toMatchObject({
+ projectId: mockProject.id,
projectName: mockProject.name,
projectAvatarUrl: mockProject.avatar_url,
});
diff --git a/spec/frontend/ide/components/ide_review_spec.js b/spec/frontend/ide/components/ide_review_spec.js
index 13d20761263..0759f957374 100644
--- a/spec/frontend/ide/components/ide_review_spec.js
+++ b/spec/frontend/ide/components/ide_review_spec.js
@@ -42,7 +42,7 @@ describe('IDE review mode', () => {
let inititializeSpy;
beforeEach(async () => {
- inititializeSpy = jest.spyOn(wrapper.find(IdeReview).vm, 'initialize');
+ inititializeSpy = jest.spyOn(wrapper.findComponent(IdeReview).vm, 'initialize');
store.state.viewer = 'editor';
await wrapper.vm.reactivate();
@@ -85,7 +85,7 @@ describe('IDE review mode', () => {
});
it('renders edit dropdown', () => {
- expect(wrapper.find(EditorModeDropdown).exists()).toBe(true);
+ expect(wrapper.findComponent(EditorModeDropdown).exists()).toBe(true);
});
it('renders merge request link & IID', async () => {
diff --git a/spec/frontend/ide/components/ide_side_bar_spec.js b/spec/frontend/ide/components/ide_side_bar_spec.js
index 4469c3fc901..4784d6c516f 100644
--- a/spec/frontend/ide/components/ide_side_bar_spec.js
+++ b/spec/frontend/ide/components/ide_side_bar_spec.js
@@ -47,32 +47,32 @@ describe('IdeSidebar', () => {
await nextTick();
- expect(wrapper.findAll(GlSkeletonLoader)).toHaveLength(3);
+ expect(wrapper.findAllComponents(GlSkeletonLoader)).toHaveLength(3);
});
describe('deferred rendering components', () => {
it('fetches components on demand', async () => {
wrapper = createComponent();
- expect(wrapper.find(IdeTree).exists()).toBe(true);
- expect(wrapper.find(IdeReview).exists()).toBe(false);
- expect(wrapper.find(RepoCommitSection).exists()).toBe(false);
+ expect(wrapper.findComponent(IdeTree).exists()).toBe(true);
+ expect(wrapper.findComponent(IdeReview).exists()).toBe(false);
+ expect(wrapper.findComponent(RepoCommitSection).exists()).toBe(false);
store.state.currentActivityView = leftSidebarViews.review.name;
await waitForPromises();
await nextTick();
- expect(wrapper.find(IdeTree).exists()).toBe(false);
- expect(wrapper.find(IdeReview).exists()).toBe(true);
- expect(wrapper.find(RepoCommitSection).exists()).toBe(false);
+ expect(wrapper.findComponent(IdeTree).exists()).toBe(false);
+ expect(wrapper.findComponent(IdeReview).exists()).toBe(true);
+ expect(wrapper.findComponent(RepoCommitSection).exists()).toBe(false);
store.state.currentActivityView = leftSidebarViews.commit.name;
await waitForPromises();
await nextTick();
- expect(wrapper.find(IdeTree).exists()).toBe(false);
- expect(wrapper.find(IdeReview).exists()).toBe(false);
- expect(wrapper.find(RepoCommitSection).exists()).toBe(true);
+ expect(wrapper.findComponent(IdeTree).exists()).toBe(false);
+ expect(wrapper.findComponent(IdeReview).exists()).toBe(false);
+ expect(wrapper.findComponent(RepoCommitSection).exists()).toBe(true);
});
it.each`
view | tree | review | commit
@@ -86,23 +86,23 @@ describe('IdeSidebar', () => {
await waitForPromises();
await nextTick();
- expect(wrapper.find(IdeTree).exists()).toBe(tree);
- expect(wrapper.find(IdeReview).exists()).toBe(review);
- expect(wrapper.find(RepoCommitSection).exists()).toBe(commit);
+ expect(wrapper.findComponent(IdeTree).exists()).toBe(tree);
+ expect(wrapper.findComponent(IdeReview).exists()).toBe(review);
+ expect(wrapper.findComponent(RepoCommitSection).exists()).toBe(commit);
});
});
it('keeps the current activity view components alive', async () => {
wrapper = createComponent();
- const ideTreeComponent = wrapper.find(IdeTree).element;
+ const ideTreeComponent = wrapper.findComponent(IdeTree).element;
store.state.currentActivityView = leftSidebarViews.commit.name;
await waitForPromises();
await nextTick();
- expect(wrapper.find(IdeTree).exists()).toBe(false);
- expect(wrapper.find(RepoCommitSection).exists()).toBe(true);
+ expect(wrapper.findComponent(IdeTree).exists()).toBe(false);
+ expect(wrapper.findComponent(RepoCommitSection).exists()).toBe(true);
store.state.currentActivityView = leftSidebarViews.edit.name;
@@ -110,6 +110,6 @@ describe('IdeSidebar', () => {
await nextTick();
// reference to the elements remains the same, meaning the components were kept alive
- expect(wrapper.find(IdeTree).element).toEqual(ideTreeComponent);
+ expect(wrapper.findComponent(IdeTree).element).toEqual(ideTreeComponent);
});
});
diff --git a/spec/frontend/ide/components/ide_sidebar_nav_spec.js b/spec/frontend/ide/components/ide_sidebar_nav_spec.js
index 2ea0c250794..80e8aba4072 100644
--- a/spec/frontend/ide/components/ide_sidebar_nav_spec.js
+++ b/spec/frontend/ide/components/ide_sidebar_nav_spec.js
@@ -8,12 +8,12 @@ import { BV_HIDE_TOOLTIP } from '~/lib/utils/constants';
const TEST_TABS = [
{
title: 'Lorem',
- icon: 'angle-up',
+ icon: 'chevron-lg-up',
views: [{ name: 'lorem-1' }, { name: 'lorem-2' }],
},
{
title: 'Ipsum',
- icon: 'angle-down',
+ icon: 'chevron-lg-down',
views: [{ name: 'ipsum-1' }, { name: 'ipsum-2' }],
},
];
@@ -55,7 +55,7 @@ describe('ide/components/ide_sidebar_nav', () => {
ariaLabel: button.attributes('aria-label'),
classes: button.classes(),
qaSelector: button.attributes('data-qa-selector'),
- icon: button.find(GlIcon).props('name'),
+ icon: button.findComponent(GlIcon).props('name'),
tooltip: getBinding(button.element, 'tooltip').value,
};
});
diff --git a/spec/frontend/ide/components/ide_spec.js b/spec/frontend/ide/components/ide_spec.js
index 9172c69b10e..48c670757a2 100644
--- a/spec/frontend/ide/components/ide_spec.js
+++ b/spec/frontend/ide/components/ide_spec.js
@@ -82,7 +82,7 @@ describe('WebIDE', () => {
await waitForPromises();
- expect(wrapper.find(ErrorMessage).exists()).toBe(exists);
+ expect(wrapper.findComponent(ErrorMessage).exists()).toBe(exists);
},
);
});
diff --git a/spec/frontend/ide/components/ide_status_bar_spec.js b/spec/frontend/ide/components/ide_status_bar_spec.js
index 17a5aa17b1f..e6e0ebaf1e8 100644
--- a/spec/frontend/ide/components/ide_status_bar_spec.js
+++ b/spec/frontend/ide/components/ide_status_bar_spec.js
@@ -1,8 +1,8 @@
+import { mount } from '@vue/test-utils';
import _ from 'lodash';
-import Vue, { nextTick } from 'vue';
import { TEST_HOST } from 'helpers/test_constants';
-import { createComponentWithStore } from 'helpers/vue_mount_component_helper';
import IdeStatusBar from '~/ide/components/ide_status_bar.vue';
+import IdeStatusMR from '~/ide/components/ide_status_mr.vue';
import { rightSidebarViews } from '~/ide/constants';
import { createStore } from '~/ide/stores';
import { projectData } from '../mock_data';
@@ -13,42 +13,48 @@ const TEST_MERGE_REQUEST_URL = `${TEST_HOST}merge-requests/${TEST_MERGE_REQUEST_
jest.mock('~/lib/utils/poll');
-describe('ideStatusBar', () => {
- let store;
- let vm;
+describe('IdeStatusBar component', () => {
+ let wrapper;
+
+ const findMRStatus = () => wrapper.findComponent(IdeStatusMR);
+
+ const mountComponent = (state = {}) => {
+ const store = createStore();
+ store.replaceState({
+ ...store.state,
+ currentBranchId: 'main',
+ currentProjectId: TEST_PROJECT_ID,
+ projects: {
+ ...store.state.projects,
+ [TEST_PROJECT_ID]: _.clone(projectData),
+ },
+ ...state,
+ });
- const createComponent = () => {
- vm = createComponentWithStore(Vue.extend(IdeStatusBar), store).$mount();
+ wrapper = mount(IdeStatusBar, { store });
};
- const findMRStatus = () => vm.$el.querySelector('.js-ide-status-mr');
-
- beforeEach(() => {
- store = createStore();
- store.state.currentProjectId = TEST_PROJECT_ID;
- store.state.projects[TEST_PROJECT_ID] = _.clone(projectData);
- store.state.currentBranchId = 'main';
- });
afterEach(() => {
- vm.$destroy();
+ wrapper.destroy();
});
describe('default', () => {
- beforeEach(() => {
- createComponent();
- });
-
it('triggers a setInterval', () => {
- expect(vm.intervalId).not.toBe(null);
+ mountComponent();
+
+ expect(wrapper.vm.intervalId).not.toBe(null);
});
it('renders the statusbar', () => {
- expect(vm.$el.className).toBe('ide-status-bar');
+ mountComponent();
+
+ expect(wrapper.classes()).toEqual(['ide-status-bar']);
});
describe('commitAgeUpdate', () => {
beforeEach(() => {
- jest.spyOn(vm, 'commitAgeUpdate').mockImplementation(() => {});
+ mountComponent();
+ jest.spyOn(wrapper.vm, 'commitAgeUpdate').mockImplementation(() => {});
});
afterEach(() => {
@@ -56,70 +62,82 @@ describe('ideStatusBar', () => {
});
it('gets called every second', () => {
- expect(vm.commitAgeUpdate).not.toHaveBeenCalled();
+ expect(wrapper.vm.commitAgeUpdate).not.toHaveBeenCalled();
jest.advanceTimersByTime(1000);
- expect(vm.commitAgeUpdate.mock.calls.length).toEqual(1);
+ expect(wrapper.vm.commitAgeUpdate.mock.calls).toHaveLength(1);
jest.advanceTimersByTime(1000);
- expect(vm.commitAgeUpdate.mock.calls.length).toEqual(2);
+ expect(wrapper.vm.commitAgeUpdate.mock.calls).toHaveLength(2);
});
});
describe('getCommitPath', () => {
it('returns the path to the commit details', () => {
- expect(vm.getCommitPath('abc123de')).toBe('/commit/abc123de');
+ mountComponent();
+
+ expect(wrapper.vm.getCommitPath('abc123de')).toBe('/commit/abc123de');
});
});
describe('pipeline status', () => {
- it('opens right sidebar on clicking icon', async () => {
- jest.spyOn(vm, 'openRightPane').mockImplementation(() => {});
- Vue.set(vm.$store.state.pipelines, 'latestPipeline', {
- details: {
- status: {
- text: 'success',
- details_path: 'test',
- icon: 'status_success',
+ it('opens right sidebar on clicking icon', () => {
+ const pipelines = {
+ latestPipeline: {
+ details: {
+ status: {
+ text: 'success',
+ details_path: 'test',
+ icon: 'status_success',
+ },
+ },
+ commit: {
+ author_gravatar_url: 'www',
},
},
- commit: {
- author_gravatar_url: 'www',
- },
- });
+ };
+ mountComponent({ pipelines });
+ jest.spyOn(wrapper.vm, 'openRightPane').mockImplementation(() => {});
- await nextTick();
- vm.$el.querySelector('.ide-status-pipeline button').click();
+ wrapper.find('button').trigger('click');
- expect(vm.openRightPane).toHaveBeenCalledWith(rightSidebarViews.pipelines);
+ expect(wrapper.vm.openRightPane).toHaveBeenCalledWith(rightSidebarViews.pipelines);
});
});
it('does not show merge request status', () => {
- expect(findMRStatus()).toBe(null);
+ mountComponent();
+
+ expect(findMRStatus().exists()).toBe(false);
});
});
describe('with merge request in store', () => {
beforeEach(() => {
- store.state.projects[TEST_PROJECT_ID].mergeRequests = {
- [TEST_MERGE_REQUEST_ID]: {
- web_url: TEST_MERGE_REQUEST_URL,
- references: {
- short: `!${TEST_MERGE_REQUEST_ID}`,
+ const state = {
+ currentMergeRequestId: TEST_MERGE_REQUEST_ID,
+ projects: {
+ [TEST_PROJECT_ID]: {
+ ..._.clone(projectData),
+ mergeRequests: {
+ [TEST_MERGE_REQUEST_ID]: {
+ web_url: TEST_MERGE_REQUEST_URL,
+ references: {
+ short: `!${TEST_MERGE_REQUEST_ID}`,
+ },
+ },
+ },
},
},
};
- store.state.currentMergeRequestId = TEST_MERGE_REQUEST_ID;
-
- createComponent();
+ mountComponent(state);
});
it('shows merge request status', () => {
- expect(findMRStatus().textContent.trim()).toEqual(`Merge request !${TEST_MERGE_REQUEST_ID}`);
- expect(findMRStatus().querySelector('a').href).toEqual(TEST_MERGE_REQUEST_URL);
+ expect(findMRStatus().text()).toBe(`Merge request !${TEST_MERGE_REQUEST_ID}`);
+ expect(findMRStatus().find('a').attributes('href')).toBe(TEST_MERGE_REQUEST_URL);
});
});
});
diff --git a/spec/frontend/ide/components/ide_status_list_spec.js b/spec/frontend/ide/components/ide_status_list_spec.js
index 371fbc6becd..0b54e8b6afb 100644
--- a/spec/frontend/ide/components/ide_status_list_spec.js
+++ b/spec/frontend/ide/components/ide_status_list_spec.js
@@ -25,7 +25,7 @@ describe('ide/components/ide_status_list', () => {
let store;
let wrapper;
- const findLink = () => wrapper.find(GlLink);
+ const findLink = () => wrapper.findComponent(GlLink);
const createComponent = (options = {}) => {
store = new Vuex.Store({
getters: {
@@ -98,6 +98,6 @@ describe('ide/components/ide_status_list', () => {
it('renders terminal sync status', () => {
createComponent();
- expect(wrapper.find(TerminalSyncStatusSafe).exists()).toBe(true);
+ expect(wrapper.findComponent(TerminalSyncStatusSafe).exists()).toBe(true);
});
});
diff --git a/spec/frontend/ide/components/ide_status_mr_spec.js b/spec/frontend/ide/components/ide_status_mr_spec.js
index 0526d4653f8..0b9111c0e2a 100644
--- a/spec/frontend/ide/components/ide_status_mr_spec.js
+++ b/spec/frontend/ide/components/ide_status_mr_spec.js
@@ -14,8 +14,8 @@ describe('ide/components/ide_status_mr', () => {
propsData: props,
});
};
- const findIcon = () => wrapper.find(GlIcon);
- const findLink = () => wrapper.find(GlLink);
+ const findIcon = () => wrapper.findComponent(GlIcon);
+ const findLink = () => wrapper.findComponent(GlLink);
afterEach(() => {
wrapper.destroy();
diff --git a/spec/frontend/ide/components/ide_tree_spec.js b/spec/frontend/ide/components/ide_tree_spec.js
index 8465ef9f5f3..f00017a2736 100644
--- a/spec/frontend/ide/components/ide_tree_spec.js
+++ b/spec/frontend/ide/components/ide_tree_spec.js
@@ -41,7 +41,7 @@ describe('IdeTree', () => {
let inititializeSpy;
beforeEach(async () => {
- inititializeSpy = jest.spyOn(wrapper.find(IdeTree).vm, 'initialize');
+ inititializeSpy = jest.spyOn(wrapper.findComponent(IdeTree).vm, 'initialize');
store.state.viewer = 'diff';
await wrapper.vm.reactivate();
diff --git a/spec/frontend/ide/components/jobs/detail/scroll_button_spec.js b/spec/frontend/ide/components/jobs/detail/scroll_button_spec.js
index d632a34266a..5eb66f75978 100644
--- a/spec/frontend/ide/components/jobs/detail/scroll_button_spec.js
+++ b/spec/frontend/ide/components/jobs/detail/scroll_button_spec.js
@@ -27,7 +27,7 @@ describe('IDE job log scroll button', () => {
beforeEach(() => createComponent({ direction }));
it('returns proper icon name', () => {
- expect(wrapper.find(GlIcon).props('name')).toBe(icon);
+ expect(wrapper.findComponent(GlIcon).props('name')).toBe(icon);
});
it('returns proper title', () => {
diff --git a/spec/frontend/ide/components/jobs/list_spec.js b/spec/frontend/ide/components/jobs/list_spec.js
index cb2c9f8f04f..b4c7eb51781 100644
--- a/spec/frontend/ide/components/jobs/list_spec.js
+++ b/spec/frontend/ide/components/jobs/list_spec.js
@@ -58,29 +58,29 @@ describe('IDE stages list', () => {
it('renders loading icon when no stages & loading', () => {
createComponent({ loading: true, stages: [] });
- expect(wrapper.find(GlLoadingIcon).exists()).toBe(true);
+ expect(wrapper.findComponent(GlLoadingIcon).exists()).toBe(true);
});
it('renders stages components for each stage', () => {
createComponent({ stages });
- expect(wrapper.findAll(Stage).length).toBe(stages.length);
+ expect(wrapper.findAllComponents(Stage).length).toBe(stages.length);
});
it('triggers fetchJobs action when stage emits fetch event', () => {
createComponent({ stages });
- wrapper.find(Stage).vm.$emit('fetch');
+ wrapper.findComponent(Stage).vm.$emit('fetch');
expect(storeActions.fetchJobs).toHaveBeenCalled();
});
it('triggers toggleStageCollapsed action when stage emits toggleCollapsed event', () => {
createComponent({ stages });
- wrapper.find(Stage).vm.$emit('toggleCollapsed');
+ wrapper.findComponent(Stage).vm.$emit('toggleCollapsed');
expect(storeActions.toggleStageCollapsed).toHaveBeenCalled();
});
it('triggers setDetailJob action when stage emits clickViewLog event', () => {
createComponent({ stages });
- wrapper.find(Stage).vm.$emit('clickViewLog');
+ wrapper.findComponent(Stage).vm.$emit('clickViewLog');
expect(storeActions.setDetailJob).toHaveBeenCalled();
});
diff --git a/spec/frontend/ide/components/jobs/stage_spec.js b/spec/frontend/ide/components/jobs/stage_spec.js
index f158c59cd32..1d5e5743a4d 100644
--- a/spec/frontend/ide/components/jobs/stage_spec.js
+++ b/spec/frontend/ide/components/jobs/stage_spec.js
@@ -18,8 +18,8 @@ describe('IDE pipeline stage', () => {
},
};
- const findHeader = () => wrapper.find({ ref: 'cardHeader' });
- const findJobList = () => wrapper.find({ ref: 'jobList' });
+ const findHeader = () => wrapper.findComponent({ ref: 'cardHeader' });
+ const findJobList = () => wrapper.findComponent({ ref: 'jobList' });
const createComponent = (props) => {
wrapper = shallowMount(Stage, {
@@ -45,7 +45,7 @@ describe('IDE pipeline stage', () => {
stage: { ...defaultProps.stage, isLoading: true, jobs: [] },
});
- expect(wrapper.find(GlLoadingIcon).exists()).toBe(true);
+ expect(wrapper.findComponent(GlLoadingIcon).exists()).toBe(true);
});
it('emits toggleCollapsed event with stage id when clicking header', async () => {
@@ -60,7 +60,7 @@ describe('IDE pipeline stage', () => {
it('emits clickViewLog entity with job', async () => {
const [job] = defaultProps.stage.jobs;
createComponent();
- wrapper.findAll(Item).at(0).vm.$emit('clickViewLog', job);
+ wrapper.findAllComponents(Item).at(0).vm.$emit('clickViewLog', job);
await nextTick();
expect(wrapper.emitted().clickViewLog[0][0]).toBe(job);
});
diff --git a/spec/frontend/ide/components/merge_requests/list_spec.js b/spec/frontend/ide/components/merge_requests/list_spec.js
index 583671a0af6..ea6e2741a85 100644
--- a/spec/frontend/ide/components/merge_requests/list_spec.js
+++ b/spec/frontend/ide/components/merge_requests/list_spec.js
@@ -14,7 +14,7 @@ describe('IDE merge requests list', () => {
let fetchMergeRequestsMock;
const findSearchTypeButtons = () => wrapper.findAll('button');
- const findTokenedInput = () => wrapper.find(TokenedInput);
+ const findTokenedInput = () => wrapper.findComponent(TokenedInput);
const createComponent = (state = {}) => {
const { mergeRequests = {}, ...restOfState } = state;
@@ -63,7 +63,7 @@ describe('IDE merge requests list', () => {
it('renders loading icon when merge request is loading', () => {
createComponent({ mergeRequests: { isLoading: true } });
- expect(wrapper.find(GlLoadingIcon).exists()).toBe(true);
+ expect(wrapper.findComponent(GlLoadingIcon).exists()).toBe(true);
});
it('renders no search results text when search is not empty', async () => {
@@ -107,8 +107,8 @@ describe('IDE merge requests list', () => {
it('renders list', () => {
createComponent(defaultStateWithMergeRequests);
- expect(wrapper.findAll(Item).length).toBe(1);
- expect(wrapper.find(Item).props('item')).toBe(
+ expect(wrapper.findAllComponents(Item).length).toBe(1);
+ expect(wrapper.findComponent(Item).props('item')).toBe(
defaultStateWithMergeRequests.mergeRequests.mergeRequests[0],
);
});
diff --git a/spec/frontend/ide/components/new_dropdown/index_spec.js b/spec/frontend/ide/components/new_dropdown/index_spec.js
index 19dcd9569b3..747c099db33 100644
--- a/spec/frontend/ide/components/new_dropdown/index_spec.js
+++ b/spec/frontend/ide/components/new_dropdown/index_spec.js
@@ -1,70 +1,66 @@
-import Vue, { nextTick } from 'vue';
-import { createComponentWithStore } from 'helpers/vue_mount_component_helper';
-import newDropdown from '~/ide/components/new_dropdown/index.vue';
+import { mount } from '@vue/test-utils';
+import NewDropdown from '~/ide/components/new_dropdown/index.vue';
+import Button from '~/ide/components/new_dropdown/button.vue';
import { createStore } from '~/ide/stores';
describe('new dropdown component', () => {
- let store;
- let vm;
-
- beforeEach(() => {
- store = createStore();
-
- const component = Vue.extend(newDropdown);
-
- vm = createComponentWithStore(component, store, {
- branch: 'main',
- path: '',
- mouseOver: false,
- type: 'tree',
+ let wrapper;
+
+ const findAllButtons = () => wrapper.findAllComponents(Button);
+
+ const mountComponent = () => {
+ const store = createStore();
+ store.state.currentProjectId = 'abcproject';
+ store.state.path = '';
+ store.state.trees['abcproject/mybranch'] = { tree: [] };
+
+ wrapper = mount(NewDropdown, {
+ store,
+ propsData: {
+ branch: 'main',
+ path: '',
+ mouseOver: false,
+ type: 'tree',
+ },
});
+ };
- vm.$store.state.currentProjectId = 'abcproject';
- vm.$store.state.path = '';
- vm.$store.state.trees['abcproject/mybranch'] = {
- tree: [],
- };
-
- vm.$mount();
-
- jest.spyOn(vm.$refs.newModal, 'open').mockImplementation(() => {});
+ beforeEach(() => {
+ mountComponent();
+ jest.spyOn(wrapper.vm.$refs.newModal, 'open').mockImplementation(() => {});
});
afterEach(() => {
- vm.$destroy();
+ wrapper.destroy();
});
it('renders new file, upload and new directory links', () => {
- const buttons = vm.$el.querySelectorAll('.dropdown-menu button');
-
- expect(buttons[0].textContent.trim()).toBe('New file');
- expect(buttons[1].textContent.trim()).toBe('Upload file');
- expect(buttons[2].textContent.trim()).toBe('New directory');
+ expect(findAllButtons().at(0).text()).toBe('New file');
+ expect(findAllButtons().at(1).text()).toBe('Upload file');
+ expect(findAllButtons().at(2).text()).toBe('New directory');
});
describe('createNewItem', () => {
it('opens modal for a blob when new file is clicked', () => {
- vm.$el.querySelectorAll('.dropdown-menu button')[0].click();
+ findAllButtons().at(0).trigger('click');
- expect(vm.$refs.newModal.open).toHaveBeenCalledWith('blob', '');
+ expect(wrapper.vm.$refs.newModal.open).toHaveBeenCalledWith('blob', '');
});
it('opens modal for a tree when new directory is clicked', () => {
- vm.$el.querySelectorAll('.dropdown-menu button')[2].click();
+ findAllButtons().at(2).trigger('click');
- expect(vm.$refs.newModal.open).toHaveBeenCalledWith('tree', '');
+ expect(wrapper.vm.$refs.newModal.open).toHaveBeenCalledWith('tree', '');
});
});
describe('isOpen', () => {
it('scrolls dropdown into view', async () => {
- jest.spyOn(vm.$refs.dropdownMenu, 'scrollIntoView').mockImplementation(() => {});
-
- vm.isOpen = true;
+ jest.spyOn(wrapper.vm.$refs.dropdownMenu, 'scrollIntoView').mockImplementation(() => {});
- await nextTick();
+ await wrapper.setProps({ isOpen: true });
- expect(vm.$refs.dropdownMenu.scrollIntoView).toHaveBeenCalledWith({
+ expect(wrapper.vm.$refs.dropdownMenu.scrollIntoView).toHaveBeenCalledWith({
block: 'nearest',
});
});
@@ -72,11 +68,11 @@ describe('new dropdown component', () => {
describe('delete entry', () => {
it('calls delete action', () => {
- jest.spyOn(vm, 'deleteEntry').mockImplementation(() => {});
+ jest.spyOn(wrapper.vm, 'deleteEntry').mockImplementation(() => {});
- vm.$el.querySelectorAll('.dropdown-menu button')[4].click();
+ findAllButtons().at(4).trigger('click');
- expect(vm.deleteEntry).toHaveBeenCalledWith('');
+ expect(wrapper.vm.deleteEntry).toHaveBeenCalledWith('');
});
});
});
diff --git a/spec/frontend/ide/components/panes/collapsible_sidebar_spec.js b/spec/frontend/ide/components/panes/collapsible_sidebar_spec.js
index 7f2ee0fe7d9..1d38231a767 100644
--- a/spec/frontend/ide/components/panes/collapsible_sidebar_spec.js
+++ b/spec/frontend/ide/components/panes/collapsible_sidebar_spec.js
@@ -27,7 +27,7 @@ describe('ide/components/panes/collapsible_sidebar.vue', () => {
});
};
- const findSidebarNav = () => wrapper.find(IdeSidebarNav);
+ const findSidebarNav = () => wrapper.findComponent(IdeSidebarNav);
beforeEach(() => {
store = createStore();
diff --git a/spec/frontend/ide/components/panes/right_spec.js b/spec/frontend/ide/components/panes/right_spec.js
index d12acd6dc4c..4555f519bc2 100644
--- a/spec/frontend/ide/components/panes/right_spec.js
+++ b/spec/frontend/ide/components/panes/right_spec.js
@@ -37,7 +37,7 @@ describe('ide/components/panes/right.vue', () => {
it('is always shown', () => {
createComponent();
- expect(wrapper.find(CollapsibleSidebar).props('extensionTabs')).toEqual(
+ expect(wrapper.findComponent(CollapsibleSidebar).props('extensionTabs')).toEqual(
expect.arrayContaining([
expect.objectContaining({
show: true,
@@ -65,7 +65,7 @@ describe('ide/components/panes/right.vue', () => {
createComponent();
- expect(wrapper.find(CollapsibleSidebar).props('extensionTabs')).toEqual(
+ expect(wrapper.findComponent(CollapsibleSidebar).props('extensionTabs')).toEqual(
expect.arrayContaining([
expect.objectContaining({
show: true,
@@ -90,7 +90,7 @@ describe('ide/components/panes/right.vue', () => {
store.state.terminal.isVisible = true;
await nextTick();
- expect(wrapper.find(CollapsibleSidebar).props('extensionTabs')).toEqual(
+ expect(wrapper.findComponent(CollapsibleSidebar).props('extensionTabs')).toEqual(
expect.arrayContaining([
expect.objectContaining({
show: true,
@@ -103,7 +103,7 @@ describe('ide/components/panes/right.vue', () => {
it('hides terminal tab when not visible', () => {
store.state.terminal.isVisible = false;
- expect(wrapper.find(CollapsibleSidebar).props('extensionTabs')).toEqual(
+ expect(wrapper.findComponent(CollapsibleSidebar).props('extensionTabs')).toEqual(
expect.arrayContaining([
expect.objectContaining({
show: false,
diff --git a/spec/frontend/ide/components/pipelines/empty_state_spec.js b/spec/frontend/ide/components/pipelines/empty_state_spec.js
index f7409fc36be..31081e8f9d5 100644
--- a/spec/frontend/ide/components/pipelines/empty_state_spec.js
+++ b/spec/frontend/ide/components/pipelines/empty_state_spec.js
@@ -32,7 +32,7 @@ describe('~/ide/components/pipelines/empty_state.vue', () => {
});
it('renders empty state', () => {
- expect(wrapper.find(GlEmptyState).props()).toMatchObject({
+ expect(wrapper.findComponent(GlEmptyState).props()).toMatchObject({
title: EmptyState.i18n.title,
description: EmptyState.i18n.description,
primaryButtonText: EmptyState.i18n.primaryButtonText,
diff --git a/spec/frontend/ide/components/pipelines/list_spec.js b/spec/frontend/ide/components/pipelines/list_spec.js
index 8a3606e27eb..545924c9c11 100644
--- a/spec/frontend/ide/components/pipelines/list_spec.js
+++ b/spec/frontend/ide/components/pipelines/list_spec.js
@@ -99,7 +99,7 @@ describe('IDE pipelines list', () => {
},
);
- expect(wrapper.find(GlLoadingIcon).exists()).toBe(false);
+ expect(wrapper.findComponent(GlLoadingIcon).exists()).toBe(false);
});
it('renders loading state', () => {
@@ -111,7 +111,7 @@ describe('IDE pipelines list', () => {
},
);
- expect(wrapper.find(GlLoadingIcon).exists()).toBe(true);
+ expect(wrapper.findComponent(GlLoadingIcon).exists()).toBe(true);
});
});
@@ -128,7 +128,7 @@ describe('IDE pipelines list', () => {
it('renders empty state when no latestPipeline', () => {
createComponent({}, { ...defaultPipelinesLoadedState, latestPipeline: null });
- expect(wrapper.find(EmptyState).exists()).toBe(true);
+ expect(wrapper.findComponent(EmptyState).exists()).toBe(true);
expect(wrapper.element).toMatchSnapshot();
});
@@ -144,7 +144,7 @@ describe('IDE pipelines list', () => {
it('renders ci icon', () => {
createComponent({}, withLatestPipelineState);
- expect(wrapper.find(CiIcon).exists()).toBe(true);
+ expect(wrapper.findComponent(CiIcon).exists()).toBe(true);
});
it('renders pipeline data', () => {
@@ -158,7 +158,7 @@ describe('IDE pipelines list', () => {
const isLoadingJobs = true;
createComponent({}, { ...withLatestPipelineState, stages, isLoadingJobs });
- const jobProps = wrapper.findAll(GlTab).at(0).find(JobsList).props();
+ const jobProps = wrapper.findAllComponents(GlTab).at(0).findComponent(JobsList).props();
expect(jobProps.stages).toBe(stages);
expect(jobProps.loading).toBe(isLoadingJobs);
});
@@ -169,7 +169,7 @@ describe('IDE pipelines list', () => {
const isLoadingJobs = true;
createComponent({}, { ...withLatestPipelineState, isLoadingJobs });
- const jobProps = wrapper.findAll(GlTab).at(1).find(JobsList).props();
+ const jobProps = wrapper.findAllComponents(GlTab).at(1).findComponent(JobsList).props();
expect(jobProps.stages).toBe(failedStages);
expect(jobProps.loading).toBe(isLoadingJobs);
});
diff --git a/spec/frontend/ide/components/preview/clientside_spec.js b/spec/frontend/ide/components/preview/clientside_spec.js
index 426fbd5c04c..cf768114e70 100644
--- a/spec/frontend/ide/components/preview/clientside_spec.js
+++ b/spec/frontend/ide/components/preview/clientside_spec.js
@@ -396,7 +396,7 @@ describe('IDE clientside preview', () => {
wrapper.setData({ loading: true });
await nextTick();
- expect(wrapper.find(GlLoadingIcon).exists()).toBe(true);
+ expect(wrapper.findComponent(GlLoadingIcon).exists()).toBe(true);
});
});
diff --git a/spec/frontend/ide/components/preview/navigator_spec.js b/spec/frontend/ide/components/preview/navigator_spec.js
index a199f4704f7..9c4f825ccf5 100644
--- a/spec/frontend/ide/components/preview/navigator_spec.js
+++ b/spec/frontend/ide/components/preview/navigator_spec.js
@@ -37,13 +37,13 @@ describe('IDE clientside preview navigator', () => {
});
it('renders loading icon by default', () => {
- expect(wrapper.find(GlLoadingIcon).exists()).toBe(true);
+ expect(wrapper.findComponent(GlLoadingIcon).exists()).toBe(true);
});
it('removes loading icon when done event is fired', async () => {
listenHandler({ type: 'done' });
await nextTick();
- expect(wrapper.find(GlLoadingIcon).exists()).toBe(false);
+ expect(wrapper.findComponent(GlLoadingIcon).exists()).toBe(false);
});
it('does not count visiting same url multiple times', async () => {
diff --git a/spec/frontend/ide/components/repo_commit_section_spec.js b/spec/frontend/ide/components/repo_commit_section_spec.js
index db4181395d3..d3312358402 100644
--- a/spec/frontend/ide/components/repo_commit_section_spec.js
+++ b/spec/frontend/ide/components/repo_commit_section_spec.js
@@ -77,8 +77,10 @@ describe('RepoCommitSection', () => {
});
it('renders no changes text', () => {
- expect(wrapper.find(EmptyState).text().trim()).toContain('No changes');
- expect(wrapper.find(EmptyState).find('img').attributes('src')).toBe(TEST_NO_CHANGES_SVG);
+ expect(wrapper.findComponent(EmptyState).text().trim()).toContain('No changes');
+ expect(wrapper.findComponent(EmptyState).find('img').attributes('src')).toBe(
+ TEST_NO_CHANGES_SVG,
+ );
});
});
@@ -111,7 +113,7 @@ describe('RepoCommitSection', () => {
});
it('does not show empty state', () => {
- expect(wrapper.find(EmptyState).exists()).toBe(false);
+ expect(wrapper.findComponent(EmptyState).exists()).toBe(false);
});
});
@@ -157,7 +159,7 @@ describe('RepoCommitSection', () => {
});
it('does not show empty state', () => {
- expect(wrapper.find(EmptyState).exists()).toBe(false);
+ expect(wrapper.findComponent(EmptyState).exists()).toBe(false);
});
});
@@ -167,7 +169,7 @@ describe('RepoCommitSection', () => {
beforeEach(async () => {
createComponent();
- inititializeSpy = jest.spyOn(wrapper.find(RepoCommitSection).vm, 'initialize');
+ inititializeSpy = jest.spyOn(wrapper.findComponent(RepoCommitSection).vm, 'initialize');
store.state.viewer = 'diff';
await wrapper.vm.reactivate();
diff --git a/spec/frontend/ide/components/repo_editor_spec.js b/spec/frontend/ide/components/repo_editor_spec.js
index 7a0bcda1b7a..9921d8cba18 100644
--- a/spec/frontend/ide/components/repo_editor_spec.js
+++ b/spec/frontend/ide/components/repo_editor_spec.js
@@ -145,8 +145,7 @@ describe('RepoEditor', () => {
jest.clearAllMocks();
// create a new model each time, otherwise tests conflict with each other
// because of same model being used in multiple tests
- // eslint-disable-next-line no-undef
- monaco.editor.getModels().forEach((model) => model.dispose());
+ monacoEditor.getModels().forEach((model) => model.dispose());
wrapper.destroy();
wrapper = null;
});
@@ -212,7 +211,7 @@ describe('RepoEditor', () => {
it('renders markdown for tempFile', async () => {
findPreviewTab().vm.$emit('click');
await waitForPromises();
- expect(wrapper.find(ContentViewer).html()).toContain(dummyFile.text.content);
+ expect(wrapper.findComponent(ContentViewer).html()).toContain(dummyFile.text.content);
});
describe('when file changes to non-markdown file', () => {
diff --git a/spec/frontend/ide/components/repo_tab_spec.js b/spec/frontend/ide/components/repo_tab_spec.js
index b16fd8f80ba..b26edc5a85b 100644
--- a/spec/frontend/ide/components/repo_tab_spec.js
+++ b/spec/frontend/ide/components/repo_tab_spec.js
@@ -19,7 +19,7 @@ describe('RepoTab', () => {
let store;
let router;
- const findTab = () => wrapper.find(GlTabStub);
+ const findTab = () => wrapper.findComponent(GlTabStub);
function createComponent(propsData) {
wrapper = mount(RepoTab, {
@@ -164,7 +164,7 @@ describe('RepoTab', () => {
await wrapper.find('.multi-file-tab-close').trigger('click');
- expect(tab.opened).toBeFalsy();
+ expect(tab.opened).toBe(false);
expect(wrapper.vm.$store.state.changedFiles).toHaveLength(1);
});
@@ -180,7 +180,7 @@ describe('RepoTab', () => {
await wrapper.find('.multi-file-tab-close').trigger('click');
- expect(tab.opened).toBeFalsy();
+ expect(tab.opened).toBe(false);
});
});
});
diff --git a/spec/frontend/ide/components/resizable_panel_spec.js b/spec/frontend/ide/components/resizable_panel_spec.js
index 55b9423aba8..fe2a128c9c8 100644
--- a/spec/frontend/ide/components/resizable_panel_spec.js
+++ b/spec/frontend/ide/components/resizable_panel_spec.js
@@ -35,7 +35,7 @@ describe('~/ide/components/resizable_panel', () => {
store,
});
};
- const findResizer = () => wrapper.find(PanelResizer);
+ const findResizer = () => wrapper.findComponent(PanelResizer);
const findInlineStyle = () => wrapper.element.style.cssText;
const createInlineStyle = (width) => `width: ${width}px;`;
diff --git a/spec/frontend/ide/components/shared/commit_message_field_spec.js b/spec/frontend/ide/components/shared/commit_message_field_spec.js
index f4f9b95b233..94da06f4cb2 100644
--- a/spec/frontend/ide/components/shared/commit_message_field_spec.js
+++ b/spec/frontend/ide/components/shared/commit_message_field_spec.js
@@ -79,7 +79,7 @@ describe('CommitMessageField', () => {
await fillText(text);
expect(findHighlightsText().text()).toEqual(text);
- expect(findHighlightsMark().text()).toBeFalsy();
+ expect(findHighlightsMark().text()).toBe('');
});
it('highlights characters over 50 length', async () => {
diff --git a/spec/frontend/ide/components/terminal/empty_state_spec.js b/spec/frontend/ide/components/terminal/empty_state_spec.js
index 57c816747aa..15fb0fe9013 100644
--- a/spec/frontend/ide/components/terminal/empty_state_spec.js
+++ b/spec/frontend/ide/components/terminal/empty_state_spec.js
@@ -46,7 +46,7 @@ describe('IDE TerminalEmptyState', () => {
},
});
- expect(wrapper.find(GlLoadingIcon).exists()).toBe(true);
+ expect(wrapper.findComponent(GlLoadingIcon).exists()).toBe(true);
});
it('when not loading, does not show loading icon', () => {
@@ -56,7 +56,7 @@ describe('IDE TerminalEmptyState', () => {
},
});
- expect(wrapper.find(GlLoadingIcon).exists()).toBe(false);
+ expect(wrapper.findComponent(GlLoadingIcon).exists()).toBe(false);
});
describe('when valid', () => {
@@ -71,7 +71,7 @@ describe('IDE TerminalEmptyState', () => {
},
});
- button = wrapper.find(GlButton);
+ button = wrapper.findComponent(GlButton);
});
it('shows button', () => {
@@ -100,7 +100,7 @@ describe('IDE TerminalEmptyState', () => {
},
});
- expect(wrapper.find(GlButton).props('disabled')).toBe(true);
- expect(wrapper.find(GlAlert).html()).toContain(TEST_HTML_MESSAGE);
+ expect(wrapper.findComponent(GlButton).props('disabled')).toBe(true);
+ expect(wrapper.findComponent(GlAlert).html()).toContain(TEST_HTML_MESSAGE);
});
});
diff --git a/spec/frontend/ide/components/terminal/session_spec.js b/spec/frontend/ide/components/terminal/session_spec.js
index 6a70ddb46a8..7e4a56b0610 100644
--- a/spec/frontend/ide/components/terminal/session_spec.js
+++ b/spec/frontend/ide/components/terminal/session_spec.js
@@ -38,7 +38,7 @@ describe('IDE TerminalSession', () => {
});
};
- const findButton = () => wrapper.find(GlButton);
+ const findButton = () => wrapper.findComponent(GlButton);
beforeEach(() => {
state = {
@@ -60,7 +60,7 @@ describe('IDE TerminalSession', () => {
it('shows terminal', () => {
factory();
- expect(wrapper.find(Terminal).props()).toEqual({
+ expect(wrapper.findComponent(Terminal).props()).toEqual({
terminalPath: TEST_TERMINAL_PATH,
status: RUNNING,
});
diff --git a/spec/frontend/ide/components/terminal/terminal_controls_spec.js b/spec/frontend/ide/components/terminal/terminal_controls_spec.js
index 71ec0dca89d..c18934f0f3b 100644
--- a/spec/frontend/ide/components/terminal/terminal_controls_spec.js
+++ b/spec/frontend/ide/components/terminal/terminal_controls_spec.js
@@ -12,7 +12,7 @@ describe('IDE TerminalControls', () => {
...options,
});
- buttons = wrapper.findAll(ScrollButton);
+ buttons = wrapper.findAllComponents(ScrollButton);
};
it('shows an up and down scroll button', () => {
diff --git a/spec/frontend/ide/components/terminal/terminal_spec.js b/spec/frontend/ide/components/terminal/terminal_spec.js
index afc49e22c83..4da3e1910e9 100644
--- a/spec/frontend/ide/components/terminal/terminal_spec.js
+++ b/spec/frontend/ide/components/terminal/terminal_spec.js
@@ -68,7 +68,7 @@ describe('IDE Terminal', () => {
it(`shows when starting (${status})`, () => {
factory({ status });
- expect(wrapper.find(GlLoadingIcon).exists()).toBe(true);
+ expect(wrapper.findComponent(GlLoadingIcon).exists()).toBe(true);
expect(wrapper.find('.top-bar').text()).toBe('Starting...');
});
});
@@ -76,7 +76,7 @@ describe('IDE Terminal', () => {
it(`shows when stopping`, () => {
factory({ status: STOPPING });
- expect(wrapper.find(GlLoadingIcon).exists()).toBe(true);
+ expect(wrapper.findComponent(GlLoadingIcon).exists()).toBe(true);
expect(wrapper.find('.top-bar').text()).toBe('Stopping...');
});
@@ -84,7 +84,7 @@ describe('IDE Terminal', () => {
it('hides when not loading', () => {
factory({ status });
- expect(wrapper.find(GlLoadingIcon).exists()).toBe(false);
+ expect(wrapper.findComponent(GlLoadingIcon).exists()).toBe(false);
expect(wrapper.find('.top-bar').text()).toBe('');
});
});
@@ -107,23 +107,23 @@ describe('IDE Terminal', () => {
});
it('is visible if terminal is created', () => {
- expect(wrapper.find(TerminalControls).exists()).toBe(true);
+ expect(wrapper.findComponent(TerminalControls).exists()).toBe(true);
});
it('scrolls glterminal on scroll-up', () => {
- wrapper.find(TerminalControls).vm.$emit('scroll-up');
+ wrapper.findComponent(TerminalControls).vm.$emit('scroll-up');
expect(wrapper.vm.glterminal.scrollToTop).toHaveBeenCalled();
});
it('scrolls glterminal on scroll-down', () => {
- wrapper.find(TerminalControls).vm.$emit('scroll-down');
+ wrapper.findComponent(TerminalControls).vm.$emit('scroll-down');
expect(wrapper.vm.glterminal.scrollToBottom).toHaveBeenCalled();
});
it('has props set', () => {
- expect(wrapper.find(TerminalControls).props()).toEqual({
+ expect(wrapper.findComponent(TerminalControls).props()).toEqual({
canScrollUp: false,
canScrollDown: false,
});
@@ -133,7 +133,7 @@ describe('IDE Terminal', () => {
wrapper.setData({ canScrollUp: true, canScrollDown: true });
return nextTick().then(() => {
- expect(wrapper.find(TerminalControls).props()).toEqual({
+ expect(wrapper.findComponent(TerminalControls).props()).toEqual({
canScrollUp: true,
canScrollDown: true,
});
diff --git a/spec/frontend/ide/components/terminal/view_spec.js b/spec/frontend/ide/components/terminal/view_spec.js
index 49f9513d2ac..57c8da9f5b7 100644
--- a/spec/frontend/ide/components/terminal/view_spec.js
+++ b/spec/frontend/ide/components/terminal/view_spec.js
@@ -66,7 +66,7 @@ describe('IDE TerminalView', () => {
it('renders empty state', async () => {
await factory();
- expect(wrapper.find(TerminalEmptyState).props()).toEqual({
+ expect(wrapper.findComponent(TerminalEmptyState).props()).toEqual({
helpPath: TEST_HELP_PATH,
illustrationPath: TEST_SVG_PATH,
...getters.allCheck(),
@@ -79,7 +79,7 @@ describe('IDE TerminalView', () => {
expect(actions.startSession).not.toHaveBeenCalled();
expect(actions.hideSplash).not.toHaveBeenCalled();
- wrapper.find(TerminalEmptyState).vm.$emit('start');
+ wrapper.findComponent(TerminalEmptyState).vm.$emit('start');
expect(actions.startSession).toHaveBeenCalled();
expect(actions.hideSplash).toHaveBeenCalled();
@@ -89,7 +89,7 @@ describe('IDE TerminalView', () => {
state.isShowSplash = false;
await factory();
- expect(wrapper.find(TerminalEmptyState).exists()).toBe(false);
- expect(wrapper.find(TerminalSession).exists()).toBe(true);
+ expect(wrapper.findComponent(TerminalEmptyState).exists()).toBe(false);
+ expect(wrapper.findComponent(TerminalSession).exists()).toBe(true);
});
});
diff --git a/spec/frontend/ide/components/terminal_sync/terminal_sync_status_safe_spec.js b/spec/frontend/ide/components/terminal_sync/terminal_sync_status_safe_spec.js
index f921037d744..5b1502cc190 100644
--- a/spec/frontend/ide/components/terminal_sync/terminal_sync_status_safe_spec.js
+++ b/spec/frontend/ide/components/terminal_sync/terminal_sync_status_safe_spec.js
@@ -34,13 +34,13 @@ describe('ide/components/terminal_sync/terminal_sync_status_safe', () => {
});
it('renders terminal sync status', () => {
- expect(wrapper.find(TerminalSyncStatus).exists()).toBe(true);
+ expect(wrapper.findComponent(TerminalSyncStatus).exists()).toBe(true);
});
});
describe('without terminal sync module', () => {
it('does not render terminal sync status', () => {
- expect(wrapper.find(TerminalSyncStatus).exists()).toBe(false);
+ expect(wrapper.findComponent(TerminalSyncStatus).exists()).toBe(false);
});
});
});
diff --git a/spec/frontend/ide/components/terminal_sync/terminal_sync_status_spec.js b/spec/frontend/ide/components/terminal_sync/terminal_sync_status_spec.js
index 3a326b08fff..147235abc8e 100644
--- a/spec/frontend/ide/components/terminal_sync/terminal_sync_status_spec.js
+++ b/spec/frontend/ide/components/terminal_sync/terminal_sync_status_spec.js
@@ -78,19 +78,19 @@ describe('ide/components/terminal_sync/terminal_sync_status', () => {
if (!icon) {
it('does not render icon', () => {
- expect(wrapper.find(GlIcon).exists()).toBe(false);
+ expect(wrapper.findComponent(GlIcon).exists()).toBe(false);
});
it('renders loading icon', () => {
- expect(wrapper.find(GlLoadingIcon).exists()).toBe(true);
+ expect(wrapper.findComponent(GlLoadingIcon).exists()).toBe(true);
});
} else {
it('renders icon', () => {
- expect(wrapper.find(GlIcon).props('name')).toEqual(icon);
+ expect(wrapper.findComponent(GlIcon).props('name')).toEqual(icon);
});
it('does not render loading icon', () => {
- expect(wrapper.find(GlLoadingIcon).exists()).toBe(false);
+ expect(wrapper.findComponent(GlLoadingIcon).exists()).toBe(false);
});
}
});
diff --git a/spec/frontend/ide/lib/common/model_manager_spec.js b/spec/frontend/ide/lib/common/model_manager_spec.js
index 08e4ab0f113..e485873e8da 100644
--- a/spec/frontend/ide/lib/common/model_manager_spec.js
+++ b/spec/frontend/ide/lib/common/model_manager_spec.js
@@ -59,7 +59,7 @@ describe('Multi-file editor library model manager', () => {
describe('hasCachedModel', () => {
it('returns false when no models exist', () => {
- expect(instance.hasCachedModel('path')).toBeFalsy();
+ expect(instance.hasCachedModel('path')).toBe(false);
});
it('returns true when model exists', () => {
@@ -67,7 +67,7 @@ describe('Multi-file editor library model manager', () => {
instance.addModel(f);
- expect(instance.hasCachedModel(f.key)).toBeTruthy();
+ expect(instance.hasCachedModel(f.key)).toBe(true);
});
});
diff --git a/spec/frontend/ide/lib/diff/diff_spec.js b/spec/frontend/ide/lib/diff/diff_spec.js
index 901f9e7cfd1..208ed9bf759 100644
--- a/spec/frontend/ide/lib/diff/diff_spec.js
+++ b/spec/frontend/ide/lib/diff/diff_spec.js
@@ -18,8 +18,8 @@ describe('Multi-file editor library diff calculator', () => {
({ originalContent, newContent, lineNumber }) => {
const diff = computeDiff(originalContent, newContent)[0];
- expect(diff.added).toBeTruthy();
- expect(diff.modified).toBeTruthy();
+ expect(diff.added).toBe(true);
+ expect(diff.modified).toBe(true);
expect(diff.removed).toBeUndefined();
expect(diff.lineNumber).toBe(lineNumber);
},
@@ -36,7 +36,7 @@ describe('Multi-file editor library diff calculator', () => {
({ originalContent, newContent, lineNumber }) => {
const diff = computeDiff(originalContent, newContent)[0];
- expect(diff.added).toBeTruthy();
+ expect(diff.added).toBe(true);
expect(diff.modified).toBeUndefined();
expect(diff.removed).toBeUndefined();
expect(diff.lineNumber).toBe(lineNumber);
@@ -56,7 +56,7 @@ describe('Multi-file editor library diff calculator', () => {
expect(diff.added).toBeUndefined();
expect(diff.modified).toBe(modified);
- expect(diff.removed).toBeTruthy();
+ expect(diff.removed).toBe(true);
expect(diff.lineNumber).toBe(lineNumber);
},
);
diff --git a/spec/frontend/ide/stores/actions/file_spec.js b/spec/frontend/ide/stores/actions/file_spec.js
index 6c1dee1e5ca..d1c31cd412b 100644
--- a/spec/frontend/ide/stores/actions/file_spec.js
+++ b/spec/frontend/ide/stores/actions/file_spec.js
@@ -60,8 +60,8 @@ describe('IDE store file actions', () => {
it('closes open files', () => {
return store.dispatch('closeFile', localFile).then(() => {
- expect(localFile.opened).toBeFalsy();
- expect(localFile.active).toBeFalsy();
+ expect(localFile.opened).toBe(false);
+ expect(localFile.active).toBe(false);
expect(store.state.openFiles.length).toBe(0);
});
});
@@ -269,7 +269,7 @@ describe('IDE store file actions', () => {
it('sets the file as active', () => {
return store.dispatch('getFileData', { path: localFile.path }).then(() => {
- expect(localFile.active).toBeTruthy();
+ expect(localFile.active).toBe(true);
});
});
@@ -277,7 +277,7 @@ describe('IDE store file actions', () => {
return store
.dispatch('getFileData', { path: localFile.path, makeFileActive: false })
.then(() => {
- expect(localFile.active).toBeFalsy();
+ expect(localFile.active).toBe(false);
});
});
diff --git a/spec/frontend/ide/stores/actions/tree_spec.js b/spec/frontend/ide/stores/actions/tree_spec.js
index d43393875eb..6e8a03b47ad 100644
--- a/spec/frontend/ide/stores/actions/tree_spec.js
+++ b/spec/frontend/ide/stores/actions/tree_spec.js
@@ -134,7 +134,7 @@ describe('Multi-file store tree actions', () => {
it('toggles the tree open', async () => {
await store.dispatch('toggleTreeOpen', tree.path);
- expect(tree.opened).toBeTruthy();
+ expect(tree.opened).toBe(true);
});
});
diff --git a/spec/frontend/ide/stores/getters_spec.js b/spec/frontend/ide/stores/getters_spec.js
index 53d161ae5c9..24661e21cd0 100644
--- a/spec/frontend/ide/stores/getters_spec.js
+++ b/spec/frontend/ide/stores/getters_spec.js
@@ -268,7 +268,7 @@ describe('IDE store getters', () => {
currentProject: undefined,
};
- expect(getters.isOnDefaultBranch({}, localGetters)).toBeFalsy();
+ expect(getters.isOnDefaultBranch({}, localGetters)).toBe(undefined);
});
it("returns true when project's default branch matches current branch", () => {
@@ -279,7 +279,7 @@ describe('IDE store getters', () => {
branchName: 'main',
};
- expect(getters.isOnDefaultBranch({}, localGetters)).toBeTruthy();
+ expect(getters.isOnDefaultBranch({}, localGetters)).toBe(true);
});
it("returns false when project's default branch doesn't match current branch", () => {
@@ -290,7 +290,7 @@ describe('IDE store getters', () => {
branchName: 'feature',
};
- expect(getters.isOnDefaultBranch({}, localGetters)).toBeFalsy();
+ expect(getters.isOnDefaultBranch({}, localGetters)).toBe(false);
});
});
diff --git a/spec/frontend/ide/stores/modules/commit/getters_spec.js b/spec/frontend/ide/stores/modules/commit/getters_spec.js
index 1e34087b290..38ebe36c2c5 100644
--- a/spec/frontend/ide/stores/modules/commit/getters_spec.js
+++ b/spec/frontend/ide/stores/modules/commit/getters_spec.js
@@ -14,21 +14,21 @@ describe('IDE commit module getters', () => {
describe('discardDraftButtonDisabled', () => {
it('returns true when commitMessage is empty', () => {
- expect(getters.discardDraftButtonDisabled(state)).toBeTruthy();
+ expect(getters.discardDraftButtonDisabled(state)).toBe(true);
});
it('returns false when commitMessage is not empty & loading is false', () => {
state.commitMessage = 'test';
state.submitCommitLoading = false;
- expect(getters.discardDraftButtonDisabled(state)).toBeFalsy();
+ expect(getters.discardDraftButtonDisabled(state)).toBe(false);
});
it('returns true when commitMessage is not empty & loading is true', () => {
state.commitMessage = 'test';
state.submitCommitLoading = true;
- expect(getters.discardDraftButtonDisabled(state)).toBeTruthy();
+ expect(getters.discardDraftButtonDisabled(state)).toBe(true);
});
});
@@ -152,13 +152,13 @@ describe('IDE commit module getters', () => {
it('returns false if NOT creating a new branch', () => {
state.commitAction = COMMIT_TO_CURRENT_BRANCH;
- expect(getters.isCreatingNewBranch(state)).toBeFalsy();
+ expect(getters.isCreatingNewBranch(state)).toBe(false);
});
it('returns true if creating a new branch', () => {
state.commitAction = COMMIT_TO_NEW_BRANCH;
- expect(getters.isCreatingNewBranch(state)).toBeTruthy();
+ expect(getters.isCreatingNewBranch(state)).toBe(true);
});
});
@@ -183,7 +183,7 @@ describe('IDE commit module getters', () => {
});
it('should never hide "New MR" option', () => {
- expect(getters.shouldHideNewMrOption(state, localGetters, null, rootGetters)).toBeFalsy();
+ expect(getters.shouldHideNewMrOption(state, localGetters, null, rootGetters)).toBeNull();
});
});
@@ -195,13 +195,13 @@ describe('IDE commit module getters', () => {
it('should NOT hide "New MR" option if user can NOT push to the current branch', () => {
rootGetters.canPushToBranch = false;
- expect(getters.shouldHideNewMrOption(state, localGetters, null, rootGetters)).toBeFalsy();
+ expect(getters.shouldHideNewMrOption(state, localGetters, null, rootGetters)).toBe(false);
});
it('should hide "New MR" option if user can push to the current branch', () => {
rootGetters.canPushToBranch = true;
- expect(getters.shouldHideNewMrOption(state, localGetters, null, rootGetters)).toBeTruthy();
+ expect(getters.shouldHideNewMrOption(state, localGetters, null, rootGetters)).toBe(true);
});
});
@@ -211,7 +211,7 @@ describe('IDE commit module getters', () => {
});
it('should never hide "New MR" option', () => {
- expect(getters.shouldHideNewMrOption(state, localGetters, null, rootGetters)).toBeFalsy();
+ expect(getters.shouldHideNewMrOption(state, localGetters, null, rootGetters)).toBeNull();
});
});
@@ -223,13 +223,13 @@ describe('IDE commit module getters', () => {
it('should NOT hide "New MR" option if there is NO existing MR for the current branch', () => {
rootGetters.hasMergeRequest = false;
- expect(getters.shouldHideNewMrOption(state, localGetters, null, rootGetters)).toBeFalsy();
+ expect(getters.shouldHideNewMrOption(state, localGetters, null, rootGetters)).toBeNull();
});
it('should hide "New MR" option if there is existing MR for the current branch', () => {
rootGetters.hasMergeRequest = true;
- expect(getters.shouldHideNewMrOption(state, localGetters, null, rootGetters)).toBeTruthy();
+ expect(getters.shouldHideNewMrOption(state, localGetters, null, rootGetters)).toBe(true);
});
});
@@ -247,17 +247,13 @@ describe('IDE commit module getters', () => {
it('should hide "New MR" when there is an existing MR', () => {
rootGetters.hasMergeRequest = true;
- expect(
- getters.shouldHideNewMrOption(state, localGetters, null, rootGetters),
- ).toBeTruthy();
+ expect(getters.shouldHideNewMrOption(state, localGetters, null, rootGetters)).toBe(true);
});
it('should hide "New MR" when there is no existing MR', () => {
rootGetters.hasMergeRequest = false;
- expect(
- getters.shouldHideNewMrOption(state, localGetters, null, rootGetters),
- ).toBeTruthy();
+ expect(getters.shouldHideNewMrOption(state, localGetters, null, rootGetters)).toBe(true);
});
});
@@ -270,17 +266,17 @@ describe('IDE commit module getters', () => {
rootGetters.hasMergeRequest = false;
rootGetters.canPushToBranch = true;
- expect(getters.shouldHideNewMrOption(state, localGetters, null, rootGetters)).toBeFalsy();
+ expect(getters.shouldHideNewMrOption(state, localGetters, null, rootGetters)).toBe(false);
rootGetters.hasMergeRequest = true;
rootGetters.canPushToBranch = true;
- expect(getters.shouldHideNewMrOption(state, localGetters, null, rootGetters)).toBeFalsy();
+ expect(getters.shouldHideNewMrOption(state, localGetters, null, rootGetters)).toBe(false);
rootGetters.hasMergeRequest = false;
rootGetters.canPushToBranch = false;
- expect(getters.shouldHideNewMrOption(state, localGetters, null, rootGetters)).toBeFalsy();
+ expect(getters.shouldHideNewMrOption(state, localGetters, null, rootGetters)).toBe(false);
});
});
});
@@ -292,7 +288,7 @@ describe('IDE commit module getters', () => {
rootGetters.hasMergeRequest = true;
rootGetters.canPushToBranch = true;
- expect(getters.shouldHideNewMrOption(state, localGetters, null, rootGetters)).toBeFalsy();
+ expect(getters.shouldHideNewMrOption(state, localGetters, null, rootGetters)).toBe(false);
});
});
diff --git a/spec/frontend/ide/stores/mutations/file_spec.js b/spec/frontend/ide/stores/mutations/file_spec.js
index 1453f26c1d9..69ec2e7a6f5 100644
--- a/spec/frontend/ide/stores/mutations/file_spec.js
+++ b/spec/frontend/ide/stores/mutations/file_spec.js
@@ -22,7 +22,7 @@ describe('IDE store file mutations', () => {
active: true,
});
- expect(localFile.active).toBeTruthy();
+ expect(localFile.active).toBe(true);
});
it('sets pending tab as not active', () => {
@@ -41,7 +41,7 @@ describe('IDE store file mutations', () => {
it('adds into opened files', () => {
mutations.TOGGLE_FILE_OPEN(localState, localFile.path);
- expect(localFile.opened).toBeTruthy();
+ expect(localFile.opened).toBe(true);
expect(localState.openFiles.length).toBe(1);
});
@@ -50,7 +50,7 @@ describe('IDE store file mutations', () => {
mutations.TOGGLE_FILE_OPEN(localState, localFile.path);
mutations.TOGGLE_FILE_OPEN(localState, localFile.path);
- expect(localFile.opened).toBeFalsy();
+ expect(localFile.opened).toBe(false);
expect(localState.openFiles.length).toBe(0);
});
});
@@ -162,7 +162,7 @@ describe('IDE store file mutations', () => {
callMutationForFile(localFile);
- expect(localFile.raw).toBeFalsy();
+ expect(localFile.raw).toEqual('');
expect(localState.stagedFiles[0].raw).toBe('testing');
});
@@ -172,7 +172,7 @@ describe('IDE store file mutations', () => {
callMutationForFile(localFile);
- expect(localFile.raw).toBeFalsy();
+ expect(localFile.raw).toEqual('');
expect(localFile.content).toBe('testing');
});
@@ -202,7 +202,7 @@ describe('IDE store file mutations', () => {
callMutationForFile(localFile);
- expect(localFile.raw).toBeFalsy();
+ expect(localFile.raw).toEqual('');
expect(localState.stagedFiles[0].raw).toBe('testing');
});
});
@@ -239,7 +239,7 @@ describe('IDE store file mutations', () => {
});
expect(localFile.content).toBe('testing');
- expect(localFile.changed).toBeTruthy();
+ expect(localFile.changed).toBe(true);
});
it('sets changed if file is a temp file', () => {
@@ -250,7 +250,7 @@ describe('IDE store file mutations', () => {
content: '',
});
- expect(localFile.changed).toBeTruthy();
+ expect(localFile.changed).toBe(true);
});
});
@@ -329,7 +329,7 @@ describe('IDE store file mutations', () => {
mutations.DISCARD_FILE_CHANGES(localState, localFile.path);
expect(localFile.content).toBe('');
- expect(localFile.changed).toBeFalsy();
+ expect(localFile.changed).toBe(false);
});
it('adds to root tree if deleted', () => {
@@ -527,7 +527,7 @@ describe('IDE store file mutations', () => {
changed: true,
});
- expect(localFile.changed).toBeTruthy();
+ expect(localFile.changed).toBe(true);
});
});
diff --git a/spec/frontend/ide/stores/mutations/merge_request_spec.js b/spec/frontend/ide/stores/mutations/merge_request_spec.js
index afbe6770c0d..2af06835181 100644
--- a/spec/frontend/ide/stores/mutations/merge_request_spec.js
+++ b/spec/frontend/ide/stores/mutations/merge_request_spec.js
@@ -30,7 +30,7 @@ describe('IDE store merge request mutations', () => {
const newMr = localState.projects.abcproject.mergeRequests[1];
expect(newMr.title).toBe('mr');
- expect(newMr.active).toBeTruthy();
+ expect(newMr.active).toBe(true);
});
it('keeps original data', () => {
diff --git a/spec/frontend/ide/utils_spec.js b/spec/frontend/ide/utils_spec.js
index 2f8447af518..fd9d481251d 100644
--- a/spec/frontend/ide/utils_spec.js
+++ b/spec/frontend/ide/utils_spec.js
@@ -46,7 +46,7 @@ describe('WebIDE utils', () => {
content: 'SELECT "éêė" from tablename',
mimeType: 'application/sql',
}),
- ).toBeFalsy();
+ ).toBe(false);
});
it('returns true for ASCII only content for unknown types', () => {
@@ -56,7 +56,7 @@ describe('WebIDE utils', () => {
content: 'plain text',
mimeType: 'application/x-new-type',
}),
- ).toBeTruthy();
+ ).toBe(true);
});
it('returns false for non-ASCII content for unknown types', () => {
@@ -66,7 +66,7 @@ describe('WebIDE utils', () => {
content: '{"éêė":"value"}',
mimeType: 'application/octet-stream',
}),
- ).toBeFalsy();
+ ).toBe(false);
});
it.each`
diff --git a/spec/frontend/integrations/edit/components/dynamic_field_spec.js b/spec/frontend/integrations/edit/components/dynamic_field_spec.js
index ee2f6541b03..5af0e272285 100644
--- a/spec/frontend/integrations/edit/components/dynamic_field_spec.js
+++ b/spec/frontend/integrations/edit/components/dynamic_field_spec.js
@@ -204,7 +204,7 @@ describe('DynamicField', () => {
});
expect(findGlFormGroup().find('small').html()).toContain(
- '[<code>1</code> <a>3</a> <a target="_blank" href="foo">4</a>]',
+ '[<code>1</code> <a>3</a> <a href="foo">4</a>]',
);
});
});
diff --git a/spec/frontend/integrations/overrides/components/integration_overrides_spec.js b/spec/frontend/integrations/overrides/components/integration_overrides_spec.js
index 6aa3e661677..fd60d7f817f 100644
--- a/spec/frontend/integrations/overrides/components/integration_overrides_spec.js
+++ b/spec/frontend/integrations/overrides/components/integration_overrides_spec.js
@@ -15,6 +15,7 @@ import UrlSync from '~/vue_shared/components/url_sync.vue';
const mockOverrides = Array(DEFAULT_PER_PAGE * 3)
.fill(1)
.map((_, index) => ({
+ id: index,
name: `test-proj-${index}`,
avatar_url: `avatar-${index}`,
full_path: `test-proj-${index}`,
@@ -59,6 +60,7 @@ describe('IntegrationOverrides', () => {
const avatar = link.findComponent(ProjectAvatar);
return {
+ id: avatar.props('projectId'),
href: link.attributes('href'),
avatarUrl: avatar.props('projectAvatarUrl'),
avatarName: avatar.props('projectName'),
@@ -90,7 +92,7 @@ describe('IntegrationOverrides', () => {
const table = findGlTable();
expect(table.exists()).toBe(true);
- expect(table.attributes('busy')).toBeFalsy();
+ expect(table.attributes('busy')).toBeUndefined();
});
it('renders IntegrationTabs with count', async () => {
@@ -109,6 +111,7 @@ describe('IntegrationOverrides', () => {
it('renders overrides as rows in table', () => {
expect(findRowsAsModel()).toEqual(
mockOverrides.map((x) => ({
+ id: x.id,
href: x.full_path,
avatarUrl: x.avatar_url,
avatarName: x.name,
diff --git a/spec/frontend/invite_members/components/invite_members_modal_spec.js b/spec/frontend/invite_members/components/invite_members_modal_spec.js
index 045a454e63a..2058784b033 100644
--- a/spec/frontend/invite_members/components/invite_members_modal_spec.js
+++ b/spec/frontend/invite_members/components/invite_members_modal_spec.js
@@ -1,4 +1,4 @@
-import { GlLink, GlModal, GlSprintf, GlFormGroup } from '@gitlab/ui';
+import { GlLink, GlModal, GlSprintf, GlFormGroup, GlCollapse, GlIcon } from '@gitlab/ui';
import MockAdapter from 'axios-mock-adapter';
import { nextTick } from 'vue';
import { stubComponent } from 'helpers/stub_component';
@@ -18,6 +18,7 @@ import {
MEMBERS_PLACEHOLDER_DISABLED,
MEMBERS_TO_PROJECT_CELEBRATE_INTRO_TEXT,
LEARN_GITLAB,
+ EXPANDED_ERRORS,
} from '~/invite_members/constants';
import eventHub from '~/invite_members/event_hub';
import ContentTransition from '~/vue_shared/components/content_transition.vue';
@@ -36,6 +37,7 @@ import {
user3,
user4,
user5,
+ user6,
GlEmoji,
} from '../mock_data/member_modal';
@@ -95,9 +97,12 @@ describe('InviteMembersModal', () => {
const findBase = () => wrapper.findComponent(InviteModalBase);
const findIntroText = () => wrapper.findByTestId('modal-base-intro-text').text();
const findMemberErrorAlert = () => wrapper.findByTestId('alert-member-error');
+ const findMoreInviteErrorsButton = () => wrapper.findByTestId('accordion-button');
+ const findAccordion = () => wrapper.findComponent(GlCollapse);
+ const findErrorsIcon = () => wrapper.findComponent(GlIcon);
const findMemberErrorMessage = (element) =>
- `${Object.keys(invitationsApiResponse.MULTIPLE_RESTRICTED.message)[element]}: ${
- Object.values(invitationsApiResponse.MULTIPLE_RESTRICTED.message)[element]
+ `${Object.keys(invitationsApiResponse.EXPANDED_RESTRICTED.message)[element]}: ${
+ Object.values(invitationsApiResponse.EXPANDED_RESTRICTED.message)[element]
}`;
const emitEventFromModal = (eventName) => () =>
findModal().vm.$emit(eventName, { preventDefault: jest.fn() });
@@ -666,8 +671,8 @@ describe('InviteMembersModal', () => {
it('displays errors for multiple and allows clearing', async () => {
createInviteMembersToGroupWrapper();
- await triggerMembersTokenSelect([user3, user4, user5]);
- mockInvitationsApi(httpStatus.CREATED, invitationsApiResponse.MULTIPLE_RESTRICTED);
+ await triggerMembersTokenSelect([user3, user4, user5, user6]);
+ mockInvitationsApi(httpStatus.CREATED, invitationsApiResponse.EXPANDED_RESTRICTED);
clickInviteButton();
@@ -675,19 +680,44 @@ describe('InviteMembersModal', () => {
expect(findMemberErrorAlert().exists()).toBe(true);
expect(findMemberErrorAlert().props('title')).toContain(
- "The following 3 members couldn't be invited",
+ "The following 4 members couldn't be invited",
);
expect(findMemberErrorAlert().text()).toContain(findMemberErrorMessage(0));
expect(findMemberErrorAlert().text()).toContain(findMemberErrorMessage(1));
expect(findMemberErrorAlert().text()).toContain(findMemberErrorMessage(2));
+ expect(findMemberErrorAlert().text()).toContain(findMemberErrorMessage(3));
+ expect(findAccordion().exists()).toBe(true);
+ expect(findMoreInviteErrorsButton().text()).toContain('Show more (2)');
+ expect(findErrorsIcon().attributes('class')).not.toContain('gl-rotate-180');
+ expect(findAccordion().attributes('visible')).toBeUndefined();
+
+ await findMoreInviteErrorsButton().vm.$emit('click');
+
+ expect(findMoreInviteErrorsButton().text()).toContain(EXPANDED_ERRORS);
+ expect(findErrorsIcon().attributes('class')).toContain('gl-rotate-180');
+ expect(findAccordion().attributes('visible')).toBeDefined();
+
+ await findMoreInviteErrorsButton().vm.$emit('click');
+
+ expect(findMoreInviteErrorsButton().text()).toContain('Show more (2)');
+ expect(findAccordion().attributes('visible')).toBeUndefined();
await removeMembersToken(user3);
+ expect(findMoreInviteErrorsButton().text()).toContain('Show more (1)');
expect(findMemberErrorAlert().props('title')).toContain(
- "The following 2 members couldn't be invited",
+ "The following 3 members couldn't be invited",
);
expect(findMemberErrorAlert().text()).not.toContain(findMemberErrorMessage(0));
+ await removeMembersToken(user6);
+
+ expect(findMoreInviteErrorsButton().exists()).toBe(false);
+ expect(findMemberErrorAlert().props('title')).toContain(
+ "The following 2 members couldn't be invited",
+ );
+ expect(findMemberErrorAlert().text()).not.toContain(findMemberErrorMessage(2));
+
await removeMembersToken(user4);
expect(findMemberErrorAlert().props('title')).toContain(
diff --git a/spec/frontend/invite_members/components/members_token_select_spec.js b/spec/frontend/invite_members/components/members_token_select_spec.js
index 6375d0f7e2e..0455460918c 100644
--- a/spec/frontend/invite_members/components/members_token_select_spec.js
+++ b/spec/frontend/invite_members/components/members_token_select_spec.js
@@ -5,6 +5,7 @@ import { stubComponent } from 'helpers/stub_component';
import waitForPromises from 'helpers/wait_for_promises';
import * as UserApi from '~/api/user_api';
import MembersTokenSelect from '~/invite_members/components/members_token_select.vue';
+import { VALID_TOKEN_BACKGROUND, INVALID_TOKEN_BACKGROUND } from '~/invite_members/constants';
const label = 'testgroup';
const placeholder = 'Search for a member';
@@ -49,6 +50,39 @@ describe('MembersTokenSelect', () => {
});
});
+ describe('when there are invalidMembers', () => {
+ it('adds in the correct class values for the tokens', async () => {
+ const badToken = { ...user1, class: INVALID_TOKEN_BACKGROUND };
+ const goodToken = { ...user2, class: VALID_TOKEN_BACKGROUND };
+
+ wrapper = createComponent();
+
+ findTokenSelector().vm.$emit('input', [user1, user2]);
+
+ await waitForPromises();
+
+ expect(findTokenSelector().props('selectedTokens')).toEqual([user1, user2]);
+
+ await wrapper.setProps({ invalidMembers: { one_1: 'bad stuff' } });
+
+ expect(findTokenSelector().props('selectedTokens')).toEqual([badToken, goodToken]);
+ });
+
+ it('does not change class when invalid members are cleared', async () => {
+ // arrange - invalidMembers is non-empty and then tokens are added
+ wrapper = createComponent();
+ await wrapper.setProps({ invalidMembers: { one_1: 'bad stuff' } });
+ findTokenSelector().vm.$emit('input', [user1, user2]);
+ await waitForPromises();
+
+ // act - invalidMembers clears out
+ await wrapper.setProps({ invalidMembers: {} });
+
+ // assert - we didn't try to update the tokens
+ expect(findTokenSelector().props('selectedTokens')).toEqual([user1, user2]);
+ });
+ });
+
describe('users', () => {
beforeEach(() => {
jest.spyOn(UserApi, 'getUsers').mockResolvedValue({ data: allUsers });
diff --git a/spec/frontend/invite_members/components/user_limit_notification_spec.js b/spec/frontend/invite_members/components/user_limit_notification_spec.js
index bbc17932a49..543fc28a342 100644
--- a/spec/frontend/invite_members/components/user_limit_notification_spec.js
+++ b/spec/frontend/invite_members/components/user_limit_notification_spec.js
@@ -9,6 +9,8 @@ import {
import { freeUsersLimit, membersCount } from '../mock_data/member_modal';
+const WARNING_ALERT_TITLE = 'You only have space for 2 more members in name';
+
describe('UserLimitNotification', () => {
let wrapper;
@@ -33,7 +35,7 @@ describe('UserLimitNotification', () => {
},
...props,
},
- provide: { name: 'my group' },
+ provide: { name: 'name' },
stubs: { GlSprintf },
});
};
@@ -50,7 +52,7 @@ describe('UserLimitNotification', () => {
});
});
- describe('when close to limit with a personal namepace', () => {
+ describe('when close to limit within a personal namespace', () => {
beforeEach(() => {
createComponent(true, false, { membersCount: 3, userNamespace: true });
});
@@ -58,27 +60,24 @@ describe('UserLimitNotification', () => {
it('renders the limit for a personal namespace', () => {
const alert = findAlert();
- expect(alert.attributes('title')).toEqual(
- 'You only have space for 2 more members in your personal projects',
- );
+ expect(alert.attributes('title')).toEqual(WARNING_ALERT_TITLE);
+
expect(alert.text()).toEqual(
'To make more space, you can remove members who no longer need access.',
);
});
});
- describe('when close to limit', () => {
+ describe('when close to limit within a group', () => {
it("renders user's limit notification", () => {
createComponent(true, false, { membersCount: 3 });
const alert = findAlert();
- expect(alert.attributes('title')).toEqual(
- 'You only have space for 2 more members in my group',
- );
+ expect(alert.attributes('title')).toEqual(WARNING_ALERT_TITLE);
expect(alert.text()).toEqual(
- 'To get more members an owner of this namespace can start a trial or upgrade to a paid tier.',
+ 'To get more members an owner of the group can start a trial or upgrade to a paid tier.',
);
});
});
@@ -89,7 +88,7 @@ describe('UserLimitNotification', () => {
const alert = findAlert();
- expect(alert.attributes('title')).toEqual("You've reached your 5 members limit for my group");
+ expect(alert.attributes('title')).toEqual("You've reached your 5 members limit for name");
expect(alert.text()).toEqual(REACHED_LIMIT_UPGRADE_SUGGESTION_MESSAGE);
});
diff --git a/spec/frontend/invite_members/mock_data/api_responses.js b/spec/frontend/invite_members/mock_data/api_responses.js
index 4ad3b6aeb66..6fe06decb6b 100644
--- a/spec/frontend/invite_members/mock_data/api_responses.js
+++ b/spec/frontend/invite_members/mock_data/api_responses.js
@@ -26,6 +26,20 @@ const MULTIPLE_RESTRICTED = {
status: 'error',
};
+const EXPANDED_RESTRICTED = {
+ message: {
+ 'email@example.com':
+ "The member's email address is not allowed for this project. Go to the Admin area > Sign-up restrictions, and check Allowed domains for sign-ups.",
+ 'email4@example.com':
+ "The member's email address is not allowed for this project. Go to the Admin area > Sign-up restrictions, and check the Domain denylist.",
+ 'email5@example.com':
+ "The member's email address is not allowed for this project. Go to the Admin area > Sign-up restrictions, and check the Domain denylist.",
+ root:
+ "The member's email address is not allowed for this project. Go to the Admin area > Sign-up restrictions, and check Allowed domains for sign-ups.",
+ },
+ status: 'error',
+};
+
const EMAIL_TAKEN = {
message: {
'email@example.org': "The member's email address has already been taken",
@@ -41,4 +55,5 @@ export const invitationsApiResponse = {
EMAIL_RESTRICTED,
MULTIPLE_RESTRICTED,
EMAIL_TAKEN,
+ EXPANDED_RESTRICTED,
};
diff --git a/spec/frontend/invite_members/mock_data/member_modal.js b/spec/frontend/invite_members/mock_data/member_modal.js
index 7d675b6206c..4f4e9345e46 100644
--- a/spec/frontend/invite_members/mock_data/member_modal.js
+++ b/spec/frontend/invite_members/mock_data/member_modal.js
@@ -39,5 +39,10 @@ export const user5 = {
name: 'root',
avatar_url: '',
};
+export const user6 = {
+ id: 'user-defined-token3',
+ name: 'email5@example.com',
+ avatar_url: '',
+};
export const GlEmoji = { template: '<img/>' };
diff --git a/spec/frontend/issuable/components/related_issuable_item_spec.js b/spec/frontend/issuable/components/related_issuable_item_spec.js
index 6b48f83041a..3f9f048605a 100644
--- a/spec/frontend/issuable/components/related_issuable_item_spec.js
+++ b/spec/frontend/issuable/components/related_issuable_item_spec.js
@@ -1,23 +1,25 @@
-import { mount } from '@vue/test-utils';
-import { nextTick } from 'vue';
+import { GlIcon, GlLink, GlButton } from '@gitlab/ui';
+import { shallowMount } from '@vue/test-utils';
import { TEST_HOST } from 'helpers/test_constants';
import IssueDueDate from '~/boards/components/issue_due_date.vue';
import { formatDate } from '~/lib/utils/datetime_utility';
+import { updateHistory } from '~/lib/utils/url_utility';
+import { __ } from '~/locale';
import RelatedIssuableItem from '~/issuable/components/related_issuable_item.vue';
+import IssueMilestone from '~/issuable/components/issue_milestone.vue';
+import IssueAssignees from '~/issuable/components/issue_assignees.vue';
+import WorkItemDetailModal from '~/work_items/components/work_item_detail_modal.vue';
import { defaultAssignees, defaultMilestone } from './related_issuable_mock_data';
+jest.mock('~/lib/utils/url_utility', () => ({
+ ...jest.requireActual('~/lib/utils/url_utility'),
+ updateHistory: jest.fn(),
+}));
+
describe('RelatedIssuableItem', () => {
let wrapper;
- function mountComponent({ mountMethod = mount, stubs = {}, props = {}, slots = {} } = {}) {
- wrapper = mountMethod(RelatedIssuableItem, {
- propsData: props,
- slots,
- stubs,
- });
- }
-
- const props = {
+ const defaultProps = {
idKey: 1,
displayReference: 'gitlab-org/gitlab-test#1',
pathIdSeparator: '#',
@@ -31,84 +33,94 @@ describe('RelatedIssuableItem', () => {
assignees: defaultAssignees,
eventNamespace: 'relatedIssue',
};
- const slots = {
- dueDate: '<div class="js-due-date-slot"></div>',
- weight: '<div class="js-weight-slot"></div>',
- };
-
- const findRemoveButton = () => wrapper.find({ ref: 'removeButton' });
- const findLockIcon = () => wrapper.find({ ref: 'lockIcon' });
- beforeEach(() => {
- mountComponent({ props, slots });
- });
+ const findIcon = () => wrapper.findComponent(GlIcon);
+ const findIssueDueDate = () => wrapper.findComponent(IssueDueDate);
+ const findLockIcon = () => wrapper.find('[data-testid="lockIcon"]');
+ const findRemoveButton = () => wrapper.findComponent(GlButton);
+ const findTitleLink = () => wrapper.findComponent(GlLink);
+ const findWorkItemDetailModal = () => wrapper.findComponent(WorkItemDetailModal);
+
+ function mountComponent({ data = {}, props = {} } = {}) {
+ wrapper = shallowMount(RelatedIssuableItem, {
+ propsData: {
+ ...defaultProps,
+ ...props,
+ },
+ data() {
+ return data;
+ },
+ });
+ }
afterEach(() => {
wrapper.destroy();
});
it('contains issuable-info-container class when canReorder is false', () => {
- expect(wrapper.props('canReorder')).toBe(false);
- expect(wrapper.find('.issuable-info-container').exists()).toBe(true);
+ mountComponent({ props: { canReorder: false } });
+
+ expect(wrapper.classes('issuable-info-container')).toBe(true);
});
it('does not render token state', () => {
+ mountComponent();
+
expect(wrapper.find('.text-secondary svg').exists()).toBe(false);
});
it('does not render remove button', () => {
- expect(wrapper.find({ ref: 'removeButton' }).exists()).toBe(false);
+ mountComponent();
+
+ expect(findRemoveButton().exists()).toBe(false);
});
describe('token title', () => {
+ beforeEach(() => {
+ mountComponent();
+ });
+
it('links to computedPath', () => {
- expect(wrapper.find('.item-title a').attributes('href')).toEqual(wrapper.props('path'));
+ expect(findTitleLink().attributes('href')).toBe(defaultProps.path);
});
it('renders confidential icon', () => {
- expect(wrapper.find('.confidential-icon').exists()).toBe(true);
+ expect(findIcon().attributes('title')).toBe(__('Confidential'));
});
it('renders title', () => {
- expect(wrapper.find('.item-title a').text()).toEqual(props.title);
+ expect(findTitleLink().text()).toBe(defaultProps.title);
});
});
describe('token state', () => {
- const tokenState = () => wrapper.find({ ref: 'iconElementXL' });
-
- beforeEach(() => {
- wrapper.setProps({ state: 'opened' });
- });
-
- it('renders if hasState', () => {
- expect(tokenState().exists()).toBe(true);
- });
-
it('renders state title', () => {
- const stateTitle = tokenState().attributes('title');
- const formattedCreateDate = formatDate(props.createdAt);
+ mountComponent({ props: { state: 'opened' } });
+ const stateTitle = findIcon().attributes('title');
+ const formattedCreateDate = formatDate(defaultProps.createdAt);
expect(stateTitle).toContain('<span class="bold">Created</span>');
expect(stateTitle).toContain(`<span class="text-tertiary">${formattedCreateDate}</span>`);
});
it('renders aria label', () => {
- expect(tokenState().attributes('aria-label')).toEqual('opened');
+ mountComponent({ props: { state: 'opened' } });
+
+ expect(findIcon().attributes('arialabel')).toBe('opened');
});
it('renders open icon when open state', () => {
- expect(tokenState().classes('issue-token-state-icon-open')).toBe(true);
+ mountComponent({ props: { state: 'opened' } });
+
+ expect(findIcon().props('name')).toBe('issue-open-m');
+ expect(findIcon().classes('issue-token-state-icon-open')).toBe(true);
});
- it('renders close icon when close state', async () => {
- wrapper.setProps({
- state: 'closed',
- closedAt: '2018-12-01T00:00:00.00Z',
- });
- await nextTick();
+ it('renders close icon when close state', () => {
+ mountComponent({ props: { state: 'closed', closedAt: '2018-12-01T00:00:00.00Z' } });
- expect(tokenState().classes('issue-token-state-icon-closed')).toBe(true);
+ expect(findIcon().props('name')).toBe('issue-close');
+ expect(findIcon().classes('issue-token-state-icon-closed')).toBe(true);
});
});
@@ -116,75 +128,66 @@ describe('RelatedIssuableItem', () => {
const tokenMetadata = () => wrapper.find('.item-meta');
it('renders item path and ID', () => {
+ mountComponent();
const pathAndID = tokenMetadata().find('.item-path-id').text();
expect(pathAndID).toContain('gitlab-org/gitlab-test');
expect(pathAndID).toContain('#1');
});
- it('renders milestone icon and name', () => {
- const milestoneIcon = tokenMetadata().find('.item-milestone svg');
- const milestoneTitle = tokenMetadata().find('.item-milestone .milestone-title');
+ it('renders milestone', () => {
+ mountComponent();
- expect(milestoneIcon.attributes('data-testid')).toBe('clock-icon');
- expect(milestoneTitle.text()).toContain('Milestone title');
+ expect(wrapper.findComponent(IssueMilestone).props('milestone')).toEqual(
+ defaultProps.milestone,
+ );
});
it('renders due date component with correct due date', () => {
- expect(wrapper.find(IssueDueDate).props('date')).toBe(props.dueDate);
+ mountComponent();
+
+ expect(findIssueDueDate().props('date')).toBe(defaultProps.dueDate);
});
- it('does not render red icon for overdue issue that is closed', async () => {
- mountComponent({
- props: {
- ...props,
- closedAt: '2018-12-01T00:00:00.00Z',
- },
- });
- await nextTick();
+ it('does not render red icon for overdue issue that is closed', () => {
+ mountComponent({ props: { closedAt: '2018-12-01T00:00:00.00Z' } });
- expect(wrapper.find(IssueDueDate).props('closed')).toBe(true);
+ expect(findIssueDueDate().props('closed')).toBe(true);
});
});
describe('token assignees', () => {
it('renders assignees avatars', () => {
- // Expect 2 times 2 because assignees are rendered twice, due to layout issues
- expect(wrapper.findAll('.item-assignees .user-avatar-link').length).toBeDefined();
+ mountComponent();
- expect(wrapper.find('.item-assignees .avatar-counter').text()).toContain('+2');
+ expect(wrapper.findComponent(IssueAssignees).props('assignees')).toEqual(
+ defaultProps.assignees,
+ );
});
});
describe('remove button', () => {
beforeEach(() => {
- wrapper.setProps({ canRemove: true });
+ mountComponent({ props: { canRemove: true }, data: { removeDisabled: true } });
});
it('renders if canRemove', () => {
- expect(findRemoveButton().exists()).toBe(true);
+ expect(findRemoveButton().props('icon')).toBe('close');
+ expect(findRemoveButton().attributes('aria-label')).toBe(__('Remove'));
});
it('does not render the lock icon', () => {
expect(findLockIcon().exists()).toBe(false);
});
- it('renders disabled button when removeDisabled', async () => {
- // setData usage is discouraged. See https://gitlab.com/groups/gitlab-org/-/epics/7330 for details
- // eslint-disable-next-line no-restricted-syntax
- wrapper.setData({ removeDisabled: true });
- await nextTick();
-
- expect(findRemoveButton().attributes('disabled')).toEqual('disabled');
+ it('renders disabled button when removeDisabled', () => {
+ expect(findRemoveButton().attributes('disabled')).toBe('true');
});
- it('triggers onRemoveRequest when clicked', async () => {
- findRemoveButton().trigger('click');
- await nextTick();
- const { relatedIssueRemoveRequest } = wrapper.emitted();
+ it('triggers onRemoveRequest when clicked', () => {
+ findRemoveButton().vm.$emit('click');
- expect(relatedIssueRemoveRequest.length).toBe(1);
- expect(relatedIssueRemoveRequest[0]).toEqual([props.idKey]);
+ expect(wrapper.emitted('relatedIssueRemoveRequest')).toEqual([[defaultProps.idKey]]);
});
});
@@ -192,10 +195,7 @@ describe('RelatedIssuableItem', () => {
const lockedMessage = 'Issues created from a vulnerability cannot be removed';
beforeEach(() => {
- wrapper.setProps({
- isLocked: true,
- lockedMessage,
- });
+ mountComponent({ props: { isLocked: true, lockedMessage } });
});
it('does not render the remove button', () => {
@@ -206,4 +206,67 @@ describe('RelatedIssuableItem', () => {
expect(findLockIcon().attributes('title')).toBe(lockedMessage);
});
});
+
+ describe('work item modal', () => {
+ const workItem = 'gid://gitlab/WorkItem/1';
+
+ it('renders', () => {
+ mountComponent();
+
+ expect(findWorkItemDetailModal().props('workItemId')).toBe(workItem);
+ });
+
+ describe('when work item is issue and the related issue title is clicked', () => {
+ it('does not open', () => {
+ mountComponent({ props: { workItemType: 'ISSUE' } });
+ wrapper.vm.$refs.modal.show = jest.fn();
+
+ findTitleLink().vm.$emit('click', { preventDefault: () => {} });
+
+ expect(wrapper.vm.$refs.modal.show).not.toHaveBeenCalled();
+ });
+ });
+
+ describe('when work item is task and the related issue title is clicked', () => {
+ beforeEach(() => {
+ mountComponent({ props: { workItemType: 'TASK' } });
+ wrapper.vm.$refs.modal.show = jest.fn();
+ findTitleLink().vm.$emit('click', { preventDefault: () => {} });
+ });
+
+ it('opens', () => {
+ expect(wrapper.vm.$refs.modal.show).toHaveBeenCalled();
+ });
+
+ it('updates the url params with the work item id', () => {
+ expect(updateHistory).toHaveBeenCalledWith({
+ url: `${TEST_HOST}/?work_item_id=1`,
+ replace: true,
+ });
+ });
+ });
+
+ describe('when it emits "workItemDeleted" event', () => {
+ it('emits "relatedIssueRemoveRequest" event', () => {
+ mountComponent();
+
+ findWorkItemDetailModal().vm.$emit('workItemDeleted', workItem);
+
+ expect(wrapper.emitted('relatedIssueRemoveRequest')).toEqual([[workItem]]);
+ });
+ });
+
+ describe('when it emits "close" event', () => {
+ it('removes the work item id from the url params', () => {
+ mountComponent();
+
+ findWorkItemDetailModal().vm.$emit('close');
+
+ expect(updateHistory).toHaveBeenCalledWith({
+ url: `${TEST_HOST}/`,
+ replace: true,
+ });
+ });
+ });
+ });
});
diff --git a/spec/frontend/issuable/popover/components/issue_popover_spec.js b/spec/frontend/issuable/popover/components/issue_popover_spec.js
index 3e77e750f3a..444165f61c7 100644
--- a/spec/frontend/issuable/popover/components/issue_popover_spec.js
+++ b/spec/frontend/issuable/popover/components/issue_popover_spec.js
@@ -1,33 +1,23 @@
-import { GlSkeletonLoader } from '@gitlab/ui';
+import { GlIcon, GlSkeletonLoader } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import Vue from 'vue';
import VueApollo from 'vue-apollo';
+import issueQueryResponse from 'test_fixtures/graphql/issuable/popover/queries/issue.query.graphql.json';
+import issueQuery from 'ee_else_ce/issuable/popover/queries/issue.query.graphql';
import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
+import IssueDueDate from '~/boards/components/issue_due_date.vue';
+import IssueMilestone from '~/issuable/components/issue_milestone.vue';
import StatusBox from '~/issuable/components/status_box.vue';
import IssuePopover from '~/issuable/popover/components/issue_popover.vue';
-import issueQuery from '~/issuable/popover/queries/issue.query.graphql';
+import WorkItemTypeIcon from '~/work_items/components/work_item_type_icon.vue';
describe('Issue Popover', () => {
let wrapper;
Vue.use(VueApollo);
- const issueQueryResponse = {
- data: {
- project: {
- __typename: 'Project',
- id: '1',
- issue: {
- __typename: 'Issue',
- id: 'gid://gitlab/Issue/1',
- createdAt: '2020-07-01T04:08:01Z',
- state: 'opened',
- title: 'Issue title',
- },
- },
- },
- };
+ const findWorkItemIcon = () => wrapper.findComponent(WorkItemTypeIcon);
const mountComponent = ({
queryResponse = jest.fn().mockResolvedValue(issueQueryResponse),
@@ -53,6 +43,12 @@ describe('Issue Popover', () => {
expect(wrapper.findComponent(GlSkeletonLoader).exists()).toBe(true);
});
+ it('should not show any work item icon while apollo is loading', () => {
+ mountComponent();
+
+ expect(findWorkItemIcon().exists()).toBe(false);
+ });
+
describe('when loaded', () => {
beforeEach(() => {
mountComponent();
@@ -74,8 +70,40 @@ describe('Issue Popover', () => {
expect(wrapper.find('h5').text()).toBe(issueQueryResponse.data.project.issue.title);
});
+ it('shows the work type icon', () => {
+ expect(findWorkItemIcon().props('workItemType')).toBe(
+ issueQueryResponse.data.project.issue.type,
+ );
+ });
+
it('shows reference', () => {
expect(wrapper.text()).toContain('foo/bar#1');
});
+
+ it('shows confidential icon', () => {
+ const icon = wrapper.findComponent(GlIcon);
+
+ expect(icon.exists()).toBe(true);
+ expect(icon.props('name')).toBe('eye-slash');
+ });
+
+ it('shows due date', () => {
+ const component = wrapper.findComponent(IssueDueDate);
+
+ expect(component.exists()).toBe(true);
+ expect(component.props('date')).toBe('2020-07-05');
+ expect(component.props('closed')).toBe(false);
+ });
+
+ it('shows milestone', () => {
+ const component = wrapper.findComponent(IssueMilestone);
+
+ expect(component.exists()).toBe(true);
+ expect(component.props('milestone')).toMatchObject({
+ title: '15.2',
+ startDate: '2020-07-01',
+ dueDate: '2020-07-30',
+ });
+ });
});
});
diff --git a/spec/frontend/issuable/related_issues/components/add_issuable_form_spec.js b/spec/frontend/issuable/related_issues/components/add_issuable_form_spec.js
index ce98a16dbb7..16d4459f597 100644
--- a/spec/frontend/issuable/related_issues/components/add_issuable_form_spec.js
+++ b/spec/frontend/issuable/related_issues/components/add_issuable_form_spec.js
@@ -157,8 +157,8 @@ describe('AddIssuableForm', () => {
describe('categorized issuables', () => {
it.each`
issuableType | pathIdSeparator | contextHeader | contextFooter
- ${issuableTypesMap.ISSUE} | ${PathIdSeparator.Issue} | ${'The current issue'} | ${'the following issue(s)'}
- ${issuableTypesMap.EPIC} | ${PathIdSeparator.Epic} | ${'The current epic'} | ${'the following epic(s)'}
+ ${issuableTypesMap.ISSUE} | ${PathIdSeparator.Issue} | ${'The current issue'} | ${'the following issues'}
+ ${issuableTypesMap.EPIC} | ${PathIdSeparator.Epic} | ${'The current epic'} | ${'the following epics'}
`(
'show header text as "$contextHeader" and footer text as "$contextFooter" issuableType is set to $issuableType',
({ issuableType, contextHeader, contextFooter }) => {
diff --git a/spec/frontend/issuable/related_issues/components/related_issues_block_spec.js b/spec/frontend/issuable/related_issues/components/related_issues_block_spec.js
index 7a350df0ba6..772cc75a205 100644
--- a/spec/frontend/issuable/related_issues/components/related_issues_block_spec.js
+++ b/spec/frontend/issuable/related_issues/components/related_issues_block_spec.js
@@ -1,5 +1,6 @@
-import { GlButton, GlIcon } from '@gitlab/ui';
-import { shallowMount, mount } from '@vue/test-utils';
+import { nextTick } from 'vue';
+import { GlIcon } from '@gitlab/ui';
+import { mountExtended, shallowMountExtended } from 'helpers/vue_test_utils_helper';
import {
issuable1,
issuable2,
@@ -17,7 +18,9 @@ import {
describe('RelatedIssuesBlock', () => {
let wrapper;
- const findIssueCountBadgeAddButton = () => wrapper.find(GlButton);
+ const findToggleButton = () => wrapper.findByTestId('toggle-links');
+ const findRelatedIssuesBody = () => wrapper.findByTestId('related-issues-body');
+ const findIssueCountBadgeAddButton = () => wrapper.findByTestId('related-issues-plus-button');
afterEach(() => {
if (wrapper) {
@@ -28,7 +31,7 @@ describe('RelatedIssuesBlock', () => {
describe('with defaults', () => {
beforeEach(() => {
- wrapper = mount(RelatedIssuesBlock, {
+ wrapper = mountExtended(RelatedIssuesBlock, {
propsData: {
pathIdSeparator: PathIdSeparator.Issue,
issuableType: issuableTypesMap.ISSUE,
@@ -37,13 +40,13 @@ describe('RelatedIssuesBlock', () => {
});
it.each`
- issuableType | pathIdSeparator | titleText | helpLinkText | addButtonText
- ${'issue'} | ${PathIdSeparator.Issue} | ${'Linked issues'} | ${'Read more about related issues'} | ${'Add a related issue'}
- ${'epic'} | ${PathIdSeparator.Epic} | ${'Linked epics'} | ${'Read more about related epics'} | ${'Add a related epic'}
+ issuableType | pathIdSeparator | titleText | helpLinkText | addButtonText
+ ${'issue'} | ${PathIdSeparator.Issue} | ${'Linked items'} | ${'Read more about related issues'} | ${'Add a related issue'}
+ ${'epic'} | ${PathIdSeparator.Epic} | ${'Linked epics'} | ${'Read more about related epics'} | ${'Add a related epic'}
`(
'displays "$titleText" in the header, "$helpLinkText" aria-label for help link, and "$addButtonText" aria-label for add button when issuableType is set to "$issuableType"',
({ issuableType, pathIdSeparator, titleText, helpLinkText, addButtonText }) => {
- wrapper = mount(RelatedIssuesBlock, {
+ wrapper = mountExtended(RelatedIssuesBlock, {
propsData: {
pathIdSeparator,
issuableType,
@@ -73,7 +76,7 @@ describe('RelatedIssuesBlock', () => {
it('displays header text slot data', () => {
const headerText = '<div>custom header text</div>';
- wrapper = shallowMount(RelatedIssuesBlock, {
+ wrapper = shallowMountExtended(RelatedIssuesBlock, {
propsData: {
pathIdSeparator: PathIdSeparator.Issue,
issuableType: 'issue',
@@ -89,7 +92,7 @@ describe('RelatedIssuesBlock', () => {
it('displays header actions slot data', () => {
const headerActions = '<button data-testid="custom-button">custom button</button>';
- wrapper = shallowMount(RelatedIssuesBlock, {
+ wrapper = shallowMountExtended(RelatedIssuesBlock, {
propsData: {
pathIdSeparator: PathIdSeparator.Issue,
issuableType: 'issue',
@@ -103,7 +106,7 @@ describe('RelatedIssuesBlock', () => {
describe('with isFetching=true', () => {
beforeEach(() => {
- wrapper = mount(RelatedIssuesBlock, {
+ wrapper = mountExtended(RelatedIssuesBlock, {
propsData: {
pathIdSeparator: PathIdSeparator.Issue,
isFetching: true,
@@ -119,7 +122,7 @@ describe('RelatedIssuesBlock', () => {
describe('with canAddRelatedIssues=true', () => {
beforeEach(() => {
- wrapper = mount(RelatedIssuesBlock, {
+ wrapper = mountExtended(RelatedIssuesBlock, {
propsData: {
pathIdSeparator: PathIdSeparator.Issue,
canAdmin: true,
@@ -135,7 +138,7 @@ describe('RelatedIssuesBlock', () => {
describe('with isFormVisible=true', () => {
beforeEach(() => {
- wrapper = mount(RelatedIssuesBlock, {
+ wrapper = mountExtended(RelatedIssuesBlock, {
propsData: {
pathIdSeparator: PathIdSeparator.Issue,
isFormVisible: true,
@@ -159,7 +162,7 @@ describe('RelatedIssuesBlock', () => {
const categorizedHeadings = () => wrapper.findAll('h4');
const headingTextAt = (index) => categorizedHeadings().at(index).text();
const mountComponent = (showCategorizedIssues) => {
- wrapper = mount(RelatedIssuesBlock, {
+ wrapper = mountExtended(RelatedIssuesBlock, {
propsData: {
pathIdSeparator: PathIdSeparator.Issue,
relatedIssues: [issuable1, issuable2, issuable3],
@@ -217,7 +220,7 @@ describe('RelatedIssuesBlock', () => {
},
].forEach(({ issuableType, icon }) => {
it(`issuableType=${issuableType} is passed`, () => {
- wrapper = shallowMount(RelatedIssuesBlock, {
+ wrapper = shallowMountExtended(RelatedIssuesBlock, {
propsData: {
pathIdSeparator: PathIdSeparator.Issue,
issuableType,
@@ -230,4 +233,42 @@ describe('RelatedIssuesBlock', () => {
});
});
});
+
+ describe('toggle', () => {
+ beforeEach(() => {
+ wrapper = shallowMountExtended(RelatedIssuesBlock, {
+ propsData: {
+ pathIdSeparator: PathIdSeparator.Issue,
+ relatedIssues: [issuable1, issuable2, issuable3],
+ issuableType: issuableTypesMap.ISSUE,
+ },
+ });
+ });
+
+ it('is expanded by default', () => {
+ expect(findToggleButton().props('icon')).toBe('chevron-lg-up');
+ expect(findToggleButton().props('disabled')).toBe(false);
+ expect(findRelatedIssuesBody().exists()).toBe(true);
+ });
+
+ it('collapses on click toggle button', async () => {
+ findToggleButton().vm.$emit('click');
+ await nextTick();
+
+ expect(findToggleButton().props('icon')).toBe('chevron-lg-down');
+ expect(findRelatedIssuesBody().exists()).toBe(false);
+ });
+ });
+
+ it('toggle button is disabled when issue has no related items', () => {
+ wrapper = shallowMountExtended(RelatedIssuesBlock, {
+ propsData: {
+ pathIdSeparator: PathIdSeparator.Issue,
+ relatedIssues: [],
+ issuableType: 'issue',
+ },
+ });
+
+ expect(findToggleButton().props('disabled')).toBe(true);
+ });
});
diff --git a/spec/frontend/issuable/related_issues/components/related_issues_root_spec.js b/spec/frontend/issuable/related_issues/components/related_issues_root_spec.js
index 1a03ea58b60..b518d2fbdec 100644
--- a/spec/frontend/issuable/related_issues/components/related_issues_root_spec.js
+++ b/spec/frontend/issuable/related_issues/components/related_issues_root_spec.js
@@ -1,4 +1,4 @@
-import { mount, shallowMount } from '@vue/test-utils';
+import { mount } from '@vue/test-utils';
import MockAdapter from 'axios-mock-adapter';
import { nextTick } from 'vue';
import waitForPromises from 'helpers/wait_for_promises';
@@ -9,8 +9,9 @@ import {
} from 'jest/issuable/components/related_issuable_mock_data';
import createFlash from '~/flash';
import axios from '~/lib/utils/axios_utils';
-import RelatedIssuesRoot from '~/related_issues/components/related_issues_root.vue';
import { linkedIssueTypesMap } from '~/related_issues/constants';
+import RelatedIssuesBlock from '~/related_issues/components/related_issues_block.vue';
+import RelatedIssuesRoot from '~/related_issues/components/related_issues_root.vue';
import relatedIssuesService from '~/related_issues/services/related_issues_service';
jest.mock('~/flash');
@@ -19,6 +20,8 @@ describe('RelatedIssuesRoot', () => {
let wrapper;
let mock;
+ const findRelatedIssuesBlock = () => wrapper.findComponent(RelatedIssuesBlock);
+
beforeEach(() => {
mock = new MockAdapter(axios);
mock.onGet(defaultProps.endpoint).reply(200, []);
@@ -26,100 +29,114 @@ describe('RelatedIssuesRoot', () => {
afterEach(() => {
mock.restore();
- if (wrapper) {
- wrapper.destroy();
- wrapper = null;
- }
+ wrapper.destroy();
});
- const createComponent = (mountFn = mount) => {
- wrapper = mountFn(RelatedIssuesRoot, {
- propsData: defaultProps,
+ const createComponent = ({ props = {}, data = {} } = {}) => {
+ wrapper = mount(RelatedIssuesRoot, {
+ propsData: {
+ ...defaultProps,
+ ...props,
+ },
+ data() {
+ return data;
+ },
});
// Wait for fetch request `fetchRelatedIssues` to complete before starting to test
return waitForPromises();
};
- describe('methods', () => {
- describe('onRelatedIssueRemoveRequest', () => {
- beforeEach(() => {
- jest
- .spyOn(relatedIssuesService.prototype, 'fetchRelatedIssues')
- .mockReturnValue(Promise.reject());
-
- return createComponent().then(() => {
+ describe('events', () => {
+ describe('when "relatedIssueRemoveRequest" event is emitted', () => {
+ describe('when emitted value is a numerical issue', () => {
+ beforeEach(async () => {
+ jest
+ .spyOn(relatedIssuesService.prototype, 'fetchRelatedIssues')
+ .mockReturnValue(Promise.reject());
+ await createComponent();
wrapper.vm.store.setRelatedIssues([issuable1]);
});
- });
- it('remove related issue and succeeds', () => {
- mock.onDelete(issuable1.referencePath).reply(200, { issues: [] });
+ it('removes related issue on API success', async () => {
+ mock.onDelete(issuable1.referencePath).reply(200, { issues: [] });
+
+ findRelatedIssuesBlock().vm.$emit('relatedIssueRemoveRequest', issuable1.id);
+ await axios.waitForAll();
+
+ expect(findRelatedIssuesBlock().props('relatedIssues')).toEqual([]);
+ });
+
+ it('does not remove related issue on API error', async () => {
+ mock.onDelete(issuable1.referencePath).reply(422, {});
- wrapper.vm.onRelatedIssueRemoveRequest(issuable1.id);
+ findRelatedIssuesBlock().vm.$emit('relatedIssueRemoveRequest', issuable1.id);
+ await axios.waitForAll();
- return axios.waitForAll().then(() => {
- expect(wrapper.vm.state.relatedIssues).toEqual([]);
+ expect(findRelatedIssuesBlock().props('relatedIssues')).toEqual([
+ expect.objectContaining({ id: issuable1.id }),
+ ]);
});
});
- it('remove related issue, fails, and restores to related issues', () => {
- mock.onDelete(issuable1.referencePath).reply(422, {});
+ describe('when emitted value is a work item id', () => {
+ it('removes related issue', async () => {
+ const workItem = `gid://gitlab/WorkItem/${issuable1.id}`;
+ createComponent({ data: { state: { relatedIssues: [issuable1] } } });
- wrapper.vm.onRelatedIssueRemoveRequest(issuable1.id);
+ findRelatedIssuesBlock().vm.$emit('relatedIssueRemoveRequest', workItem);
+ await nextTick();
- return axios.waitForAll().then(() => {
- expect(wrapper.vm.state.relatedIssues).toHaveLength(1);
- expect(wrapper.vm.state.relatedIssues[0].id).toEqual(issuable1.id);
+ expect(findRelatedIssuesBlock().props('relatedIssues')).toEqual([]);
});
});
});
- describe('onToggleAddRelatedIssuesForm', () => {
- beforeEach(() => createComponent(shallowMount));
+ describe('when "toggleAddRelatedIssuesForm" event is emitted', () => {
+ it('toggles related issues form to visible from hidden', async () => {
+ createComponent();
- it('toggle related issues form to visible', () => {
- wrapper.vm.onToggleAddRelatedIssuesForm();
+ findRelatedIssuesBlock().vm.$emit('toggleAddRelatedIssuesForm');
+ await nextTick();
- expect(wrapper.vm.isFormVisible).toEqual(true);
+ expect(findRelatedIssuesBlock().props('isFormVisible')).toBe(true);
});
- it('show add related issues form to hidden', () => {
- wrapper.vm.isFormVisible = true;
+ it('toggles related issues form to hidden from visible', async () => {
+ createComponent({ data: { isFormVisible: true } });
- wrapper.vm.onToggleAddRelatedIssuesForm();
+ findRelatedIssuesBlock().vm.$emit('toggleAddRelatedIssuesForm');
+ await nextTick();
- expect(wrapper.vm.isFormVisible).toEqual(false);
+ expect(findRelatedIssuesBlock().props('isFormVisible')).toBe(false);
});
});
- describe('onPendingIssueRemoveRequest', () => {
- beforeEach(() =>
- createComponent().then(() => {
- wrapper.vm.store.setPendingReferences([issuable1.reference]);
- }),
- );
+ describe('when "pendingIssuableRemoveRequest" event is emitted', () => {
+ beforeEach(() => {
+ createComponent();
+ wrapper.vm.store.setPendingReferences([issuable1.reference]);
+ });
- it('remove pending related issue', () => {
- expect(wrapper.vm.state.pendingReferences).toHaveLength(1);
+ it('removes pending related issue', async () => {
+ expect(findRelatedIssuesBlock().props('pendingReferences')).toHaveLength(1);
- wrapper.vm.onPendingIssueRemoveRequest(0);
+ findRelatedIssuesBlock().vm.$emit('pendingIssuableRemoveRequest', 0);
+ await nextTick();
- expect(wrapper.vm.state.pendingReferences).toHaveLength(0);
+ expect(findRelatedIssuesBlock().props('pendingReferences')).toHaveLength(0);
});
});
- describe('onPendingFormSubmit', () => {
- beforeEach(() => {
+ describe('when "addIssuableFormSubmit" event is emitted', () => {
+ beforeEach(async () => {
jest
.spyOn(relatedIssuesService.prototype, 'fetchRelatedIssues')
.mockReturnValue(Promise.reject());
-
- return createComponent().then(() => {
- jest.spyOn(wrapper.vm, 'processAllReferences');
- jest.spyOn(wrapper.vm.service, 'addRelatedIssues');
- createFlash.mockClear();
- });
+ await createComponent();
+ jest.spyOn(wrapper.vm, 'processAllReferences');
+ jest.spyOn(wrapper.vm.service, 'addRelatedIssues');
+ createFlash.mockClear();
});
it('processes references before submitting', () => {
@@ -130,23 +147,22 @@ describe('RelatedIssuesRoot', () => {
linkedIssueType,
};
- wrapper.vm.onPendingFormSubmit(emitObj);
+ findRelatedIssuesBlock().vm.$emit('addIssuableFormSubmit', emitObj);
expect(wrapper.vm.processAllReferences).toHaveBeenCalledWith(input);
expect(wrapper.vm.service.addRelatedIssues).toHaveBeenCalledWith([input], linkedIssueType);
});
- it('submit zero pending issue as related issue', () => {
+ it('submits zero pending issues as related issues', () => {
wrapper.vm.store.setPendingReferences([]);
- wrapper.vm.onPendingFormSubmit({});
- return waitForPromises().then(() => {
- expect(wrapper.vm.state.pendingReferences).toHaveLength(0);
- expect(wrapper.vm.state.relatedIssues).toHaveLength(0);
- });
+ findRelatedIssuesBlock().vm.$emit('addIssuableFormSubmit', {});
+
+ expect(findRelatedIssuesBlock().props('pendingReferences')).toHaveLength(0);
+ expect(findRelatedIssuesBlock().props('relatedIssues')).toHaveLength(0);
});
- it('submit pending issue as related issue', () => {
+ it('submits pending issue as related issue', async () => {
mock.onPost(defaultProps.endpoint).reply(200, {
issuables: [issuable1],
result: {
@@ -154,18 +170,18 @@ describe('RelatedIssuesRoot', () => {
status: 'success',
},
});
-
wrapper.vm.store.setPendingReferences([issuable1.reference]);
- wrapper.vm.onPendingFormSubmit({});
- return waitForPromises().then(() => {
- expect(wrapper.vm.state.pendingReferences).toHaveLength(0);
- expect(wrapper.vm.state.relatedIssues).toHaveLength(1);
- expect(wrapper.vm.state.relatedIssues[0].id).toEqual(issuable1.id);
- });
+ findRelatedIssuesBlock().vm.$emit('addIssuableFormSubmit', {});
+ await waitForPromises();
+
+ expect(findRelatedIssuesBlock().props('pendingReferences')).toHaveLength(0);
+ expect(findRelatedIssuesBlock().props('relatedIssues')).toEqual([
+ expect.objectContaining({ id: issuable1.id }),
+ ]);
});
- it('submit multiple pending issues as related issues', () => {
+ it('submits multiple pending issues as related issues', async () => {
mock.onPost(defaultProps.endpoint).reply(200, {
issuables: [issuable1, issuable2],
result: {
@@ -173,201 +189,148 @@ describe('RelatedIssuesRoot', () => {
status: 'success',
},
});
-
wrapper.vm.store.setPendingReferences([issuable1.reference, issuable2.reference]);
- wrapper.vm.onPendingFormSubmit({});
- return waitForPromises().then(() => {
- expect(wrapper.vm.state.pendingReferences).toHaveLength(0);
- expect(wrapper.vm.state.relatedIssues).toHaveLength(2);
- expect(wrapper.vm.state.relatedIssues[0].id).toEqual(issuable1.id);
- expect(wrapper.vm.state.relatedIssues[1].id).toEqual(issuable2.id);
- });
+ findRelatedIssuesBlock().vm.$emit('addIssuableFormSubmit', {});
+ await waitForPromises();
+
+ expect(findRelatedIssuesBlock().props('pendingReferences')).toHaveLength(0);
+ expect(findRelatedIssuesBlock().props('relatedIssues')).toEqual([
+ expect.objectContaining({ id: issuable1.id }),
+ expect.objectContaining({ id: issuable2.id }),
+ ]);
});
- it('displays a message from the backend upon error', () => {
+ it('displays a message from the backend upon error', async () => {
const input = '#123';
const message = 'error';
-
mock.onPost(defaultProps.endpoint).reply(409, { message });
wrapper.vm.store.setPendingReferences([issuable1.reference, issuable2.reference]);
expect(createFlash).not.toHaveBeenCalled();
- wrapper.vm.onPendingFormSubmit(input);
-
- return waitForPromises().then(() => {
- expect(createFlash).toHaveBeenCalledWith({
- message,
- });
- });
- });
- });
- describe('onPendingFormCancel', () => {
- beforeEach(() =>
- createComponent().then(() => {
- wrapper.vm.isFormVisible = true;
- wrapper.vm.inputValue = 'foo';
- }),
- );
-
- it('when canceling and hiding add issuable form', async () => {
- wrapper.vm.onPendingFormCancel();
+ findRelatedIssuesBlock().vm.$emit('addIssuableFormSubmit', input);
+ await waitForPromises();
- await nextTick();
- expect(wrapper.vm.isFormVisible).toEqual(false);
- expect(wrapper.vm.inputValue).toEqual('');
- expect(wrapper.vm.state.pendingReferences).toHaveLength(0);
+ expect(createFlash).toHaveBeenCalledWith({ message });
});
});
- describe('fetchRelatedIssues', () => {
- beforeEach(() => createComponent());
-
- it('sets isFetching while fetching', async () => {
- wrapper.vm.fetchRelatedIssues();
+ describe('when "addIssuableFormCancel" event is emitted', () => {
+ beforeEach(() => createComponent({ data: { isFormVisible: true, inputValue: 'foo' } }));
- expect(wrapper.vm.isFetching).toEqual(true);
+ it('hides form and resets input', async () => {
+ findRelatedIssuesBlock().vm.$emit('addIssuableFormCancel');
+ await nextTick();
- await waitForPromises();
- expect(wrapper.vm.isFetching).toEqual(false);
- });
-
- it('should fetch related issues', async () => {
- mock.onGet(defaultProps.endpoint).reply(200, [issuable1, issuable2]);
-
- wrapper.vm.fetchRelatedIssues();
-
- await waitForPromises();
- expect(wrapper.vm.state.relatedIssues).toHaveLength(2);
- expect(wrapper.vm.state.relatedIssues[0].id).toEqual(issuable1.id);
- expect(wrapper.vm.state.relatedIssues[1].id).toEqual(issuable2.id);
+ expect(findRelatedIssuesBlock().props('isFormVisible')).toBe(false);
+ expect(findRelatedIssuesBlock().props('inputValue')).toBe('');
+ expect(findRelatedIssuesBlock().props('pendingReferences')).toHaveLength(0);
});
});
- describe('onInput', () => {
- beforeEach(() => createComponent());
-
- it('fill in issue number reference and adds to pending related issues', () => {
+ describe('when "addIssuableFormInput" event is emitted', () => {
+ it('updates pending references with issue reference', async () => {
const input = '#123 ';
- wrapper.vm.onInput({
+ createComponent();
+
+ findRelatedIssuesBlock().vm.$emit('addIssuableFormInput', {
untouchedRawReferences: [input.trim()],
touchedReference: input,
});
+ await nextTick();
- expect(wrapper.vm.state.pendingReferences).toHaveLength(1);
- expect(wrapper.vm.state.pendingReferences[0]).toEqual('#123');
+ expect(findRelatedIssuesBlock().props('pendingReferences')).toEqual([input.trim()]);
});
- it('fill in with full reference', () => {
+ it('updates pending references with full reference', async () => {
const input = 'asdf/qwer#444 ';
- wrapper.vm.onInput({ untouchedRawReferences: [input.trim()], touchedReference: input });
+ createComponent();
+
+ findRelatedIssuesBlock().vm.$emit('addIssuableFormInput', {
+ untouchedRawReferences: [input.trim()],
+ touchedReference: input,
+ });
+ await nextTick();
- expect(wrapper.vm.state.pendingReferences).toHaveLength(1);
- expect(wrapper.vm.state.pendingReferences[0]).toEqual('asdf/qwer#444');
+ expect(findRelatedIssuesBlock().props('pendingReferences')).toEqual([input.trim()]);
});
- it('fill in with issue link', () => {
+ it('updates pending references with issue link', async () => {
const link = 'http://localhost:3000/foo/bar/issues/111';
const input = `${link} `;
- wrapper.vm.onInput({ untouchedRawReferences: [input.trim()], touchedReference: input });
+ createComponent();
- expect(wrapper.vm.state.pendingReferences).toHaveLength(1);
- expect(wrapper.vm.state.pendingReferences[0]).toEqual(link);
+ findRelatedIssuesBlock().vm.$emit('addIssuableFormInput', {
+ untouchedRawReferences: [input.trim()],
+ touchedReference: input,
+ });
+ await nextTick();
+
+ expect(findRelatedIssuesBlock().props('pendingReferences')).toEqual([link]);
});
- it('fill in with multiple references', () => {
+ it('updates pending references with multiple references', async () => {
const input = 'asdf/qwer#444 #12 ';
- wrapper.vm.onInput({
+ createComponent();
+
+ findRelatedIssuesBlock().vm.$emit('addIssuableFormInput', {
untouchedRawReferences: input.trim().split(/\s/),
touchedReference: '2',
});
+ await nextTick();
- expect(wrapper.vm.state.pendingReferences).toHaveLength(2);
- expect(wrapper.vm.state.pendingReferences[0]).toEqual('asdf/qwer#444');
- expect(wrapper.vm.state.pendingReferences[1]).toEqual('#12');
+ expect(findRelatedIssuesBlock().props('pendingReferences')).toEqual([
+ 'asdf/qwer#444',
+ '#12',
+ ]);
});
- it('fill in with some invalid things', () => {
+ it('updates pending references with invalid values', async () => {
const input = 'something random ';
- wrapper.vm.onInput({
+ createComponent();
+
+ findRelatedIssuesBlock().vm.$emit('addIssuableFormInput', {
untouchedRawReferences: input.trim().split(/\s/),
touchedReference: '2',
});
+ await nextTick();
- expect(wrapper.vm.state.pendingReferences).toHaveLength(2);
- expect(wrapper.vm.state.pendingReferences[0]).toEqual('something');
- expect(wrapper.vm.state.pendingReferences[1]).toEqual('random');
+ expect(findRelatedIssuesBlock().props('pendingReferences')).toEqual([
+ 'something',
+ 'random',
+ ]);
});
- it.each`
- pathIdSeparator
- ${'#'}
- ${'&'}
- `(
- 'prepends $pathIdSeparator when user enters a numeric value [0-9]',
- async ({ pathIdSeparator }) => {
+ it.each(['#', '&'])(
+ 'prepends %s when user enters a numeric value [0-9]',
+ async (pathIdSeparator) => {
const input = '23';
+ createComponent({ props: { pathIdSeparator } });
- await wrapper.setProps({
- pathIdSeparator,
- });
-
- wrapper.vm.onInput({
+ findRelatedIssuesBlock().vm.$emit('addIssuableFormInput', {
untouchedRawReferences: input.trim().split(/\s/),
touchedReference: input,
});
+ await nextTick();
- expect(wrapper.vm.inputValue).toBe(`${pathIdSeparator}${input}`);
+ expect(findRelatedIssuesBlock().props('inputValue')).toBe(`${pathIdSeparator}${input}`);
},
);
-
- it('prepends # when user enters a number', async () => {
- const input = 23;
-
- wrapper.vm.onInput({
- untouchedRawReferences: String(input).trim().split(/\s/),
- touchedReference: input,
- });
-
- expect(wrapper.vm.inputValue).toBe(`#${input}`);
- });
});
- describe('onBlur', () => {
- beforeEach(() =>
- createComponent().then(() => {
- jest.spyOn(wrapper.vm, 'processAllReferences').mockImplementation(() => {});
- }),
- );
-
- it('add any references to pending when blurring', () => {
- const input = '#123';
-
- wrapper.vm.onBlur(input);
-
- expect(wrapper.vm.processAllReferences).toHaveBeenCalledWith(input);
+ describe('when "addIssuableFormBlur" event is emitted', () => {
+ beforeEach(() => {
+ createComponent();
+ jest.spyOn(wrapper.vm, 'processAllReferences').mockImplementation(() => {});
});
- });
-
- describe('processAllReferences', () => {
- beforeEach(() => createComponent());
- it('add valid reference to pending', () => {
+ it('adds any references to pending when blurring', () => {
const input = '#123';
- wrapper.vm.processAllReferences(input);
- expect(wrapper.vm.state.pendingReferences).toHaveLength(1);
- expect(wrapper.vm.state.pendingReferences[0]).toEqual('#123');
- });
+ findRelatedIssuesBlock().vm.$emit('addIssuableFormBlur', input);
- it('add any valid references to pending', () => {
- const input = 'asdf #123';
- wrapper.vm.processAllReferences(input);
-
- expect(wrapper.vm.state.pendingReferences).toHaveLength(2);
- expect(wrapper.vm.state.pendingReferences[0]).toEqual('asdf');
- expect(wrapper.vm.state.pendingReferences[1]).toEqual('#123');
+ expect(wrapper.vm.processAllReferences).toHaveBeenCalledWith(input);
});
});
});
diff --git a/spec/frontend/issues/list/components/issues_list_app_spec.js b/spec/frontend/issues/list/components/issues_list_app_spec.js
index 3d3dbfa6853..a39853fd29c 100644
--- a/spec/frontend/issues/list/components/issues_list_app_spec.js
+++ b/spec/frontend/issues/list/components/issues_list_app_spec.js
@@ -52,6 +52,12 @@ import { getSortKey, getSortOptions } from '~/issues/list/utils';
import axios from '~/lib/utils/axios_utils';
import { scrollUp } from '~/lib/utils/scroll_utils';
import { joinPaths } from '~/lib/utils/url_utility';
+import {
+ WORK_ITEM_TYPE_ENUM_INCIDENT,
+ WORK_ITEM_TYPE_ENUM_ISSUE,
+ WORK_ITEM_TYPE_ENUM_TASK,
+ WORK_ITEM_TYPE_ENUM_TEST_CASE,
+} from '~/work_items/constants';
jest.mock('@sentry/browser');
jest.mock('~/flash');
@@ -123,6 +129,7 @@ describe('CE IssuesListApp component', () => {
const mountComponent = ({
provide = {},
data = {},
+ workItems = false,
issuesQueryResponse = mockIssuesQueryResponse,
issuesCountsQueryResponse = mockIssuesCountsQueryResponse,
sortPreferenceMutationResponse = jest.fn().mockResolvedValue(setSortPreferenceMutationResponse),
@@ -141,6 +148,9 @@ describe('CE IssuesListApp component', () => {
apolloProvider: createMockApollo(requestHandlers),
router,
provide: {
+ glFeatures: {
+ workItems,
+ },
...defaultProvide,
...provide,
},
@@ -168,22 +178,6 @@ describe('CE IssuesListApp component', () => {
return waitForPromises();
});
- it('queries list with types `ISSUE` and `INCIDENT', () => {
- const expectedTypes = ['ISSUE', 'INCIDENT', 'TEST_CASE'];
-
- expect(mockIssuesQueryResponse).toHaveBeenCalledWith(
- expect.objectContaining({
- types: expectedTypes,
- }),
- );
-
- expect(mockIssuesCountsQueryResponse).toHaveBeenCalledWith(
- expect.objectContaining({
- types: expectedTypes,
- }),
- );
- });
-
it('renders', () => {
expect(findIssuableList().props()).toMatchObject({
namespace: defaultProvide.fullPath,
@@ -1024,6 +1018,21 @@ describe('CE IssuesListApp component', () => {
});
});
});
+
+ describe('when "page-size-change" event is emitted by IssuableList', () => {
+ it('updates url params with new page size', async () => {
+ wrapper = mountComponent();
+ router.push = jest.fn();
+
+ findIssuableList().vm.$emit('page-size-change', 50);
+ await nextTick();
+
+ expect(router.push).toHaveBeenCalledTimes(1);
+ expect(router.push).toHaveBeenCalledWith({
+ query: expect.objectContaining({ first_page_size: 50 }),
+ });
+ });
+ });
});
describe('public visibility', () => {
@@ -1045,17 +1054,45 @@ describe('CE IssuesListApp component', () => {
});
});
- describe('when "page-size-change" event is emitted by IssuableList', () => {
- it('updates url params with new page size', async () => {
- wrapper = mountComponent();
- router.push = jest.fn();
+ describe('fetching issues', () => {
+ describe('when work_items feature flag is disabled', () => {
+ beforeEach(() => {
+ wrapper = mountComponent({ workItems: false });
+ jest.runOnlyPendingTimers();
+ });
- findIssuableList().vm.$emit('page-size-change', 50);
- await nextTick();
+ it('fetches issue, incident, and test case types', () => {
+ const types = [
+ WORK_ITEM_TYPE_ENUM_ISSUE,
+ WORK_ITEM_TYPE_ENUM_INCIDENT,
+ WORK_ITEM_TYPE_ENUM_TEST_CASE,
+ ];
- expect(router.push).toHaveBeenCalledTimes(1);
- expect(router.push).toHaveBeenCalledWith({
- query: expect.objectContaining({ first_page_size: 50 }),
+ expect(mockIssuesQueryResponse).toHaveBeenCalledWith(expect.objectContaining({ types }));
+ expect(mockIssuesCountsQueryResponse).toHaveBeenCalledWith(
+ expect.objectContaining({ types }),
+ );
+ });
+ });
+
+ describe('when work_items feature flag is enabled', () => {
+ beforeEach(() => {
+ wrapper = mountComponent({ workItems: true });
+ jest.runOnlyPendingTimers();
+ });
+
+ it('fetches issue, incident, test case, and task types', () => {
+ const types = [
+ WORK_ITEM_TYPE_ENUM_ISSUE,
+ WORK_ITEM_TYPE_ENUM_INCIDENT,
+ WORK_ITEM_TYPE_ENUM_TEST_CASE,
+ WORK_ITEM_TYPE_ENUM_TASK,
+ ];
+
+ expect(mockIssuesQueryResponse).toHaveBeenCalledWith(expect.objectContaining({ types }));
+ expect(mockIssuesCountsQueryResponse).toHaveBeenCalledWith(
+ expect.objectContaining({ types }),
+ );
});
});
});
diff --git a/spec/frontend/issues/list/mock_data.js b/spec/frontend/issues/list/mock_data.js
index 4347c580a4d..42e9d348b16 100644
--- a/spec/frontend/issues/list/mock_data.js
+++ b/spec/frontend/issues/list/mock_data.js
@@ -37,6 +37,7 @@ export const getIssuesQueryResponse = {
userDiscussionsCount: 4,
webPath: 'project/-/issues/789',
webUrl: 'project/-/issues/789',
+ type: 'issue',
assignees: {
nodes: [
{
diff --git a/spec/frontend/issues/show/components/app_spec.js b/spec/frontend/issues/show/components/app_spec.js
index 27604b8ccf3..12f9707da04 100644
--- a/spec/frontend/issues/show/components/app_spec.js
+++ b/spec/frontend/issues/show/components/app_spec.js
@@ -119,7 +119,7 @@ describe('Issuable output', () => {
expect(findEdited().exists()).toBe(true);
expect(findEdited().props('updatedByPath')).toMatch(/\/some_user$/);
- expect(findEdited().props('updatedAt')).toBeTruthy();
+ expect(findEdited().props('updatedAt')).toBe(initialRequest.updated_at);
expect(wrapper.vm.state.lock_version).toBe(initialRequest.lock_version);
})
.then(() => {
@@ -133,7 +133,7 @@ describe('Issuable output', () => {
expect(findEdited().exists()).toBe(true);
expect(findEdited().props('updatedByName')).toBe('Other User');
expect(findEdited().props('updatedByPath')).toMatch(/\/other_user$/);
- expect(findEdited().props('updatedAt')).toBeTruthy();
+ expect(findEdited().props('updatedAt')).toBe(secondRequest.updated_at);
});
});
diff --git a/spec/frontend/issues/show/components/description_spec.js b/spec/frontend/issues/show/components/description_spec.js
index 8ee57f97754..bdb1448148e 100644
--- a/spec/frontend/issues/show/components/description_spec.js
+++ b/spec/frontend/issues/show/components/description_spec.js
@@ -249,7 +249,7 @@ describe('Description component', () => {
await nextTick();
expect(document.querySelector('.issuable-meta #task_status_short').textContent.trim()).toBe(
- '1/1 task',
+ '1/1 checklist item',
);
});
@@ -266,7 +266,7 @@ describe('Description component', () => {
});
});
- describe('with work items feature flag is enabled', () => {
+ describe('with work_items_create_from_markdown feature flag enabled', () => {
describe('empty description', () => {
beforeEach(() => {
createComponent({
@@ -275,7 +275,7 @@ describe('Description component', () => {
},
provide: {
glFeatures: {
- workItems: true,
+ workItemsCreateFromMarkdown: true,
},
},
});
@@ -295,7 +295,7 @@ describe('Description component', () => {
},
provide: {
glFeatures: {
- workItems: true,
+ workItemsCreateFromMarkdown: true,
},
},
});
@@ -344,7 +344,7 @@ describe('Description component', () => {
descriptionHtml: descriptionHtmlWithTask,
},
provide: {
- glFeatures: { workItems: true },
+ glFeatures: { workItemsCreateFromMarkdown: true },
},
});
return nextTick();
@@ -406,7 +406,7 @@ describe('Description component', () => {
createComponent({
props: { descriptionHtml: descriptionHtmlWithTask },
- provide: { glFeatures: { workItems: true } },
+ provide: { glFeatures: { workItemsCreateFromMarkdown: true } },
});
expect(showDetailsModal).toHaveBeenCalledTimes(modalOpened);
@@ -422,7 +422,7 @@ describe('Description component', () => {
descriptionHtml: descriptionHtmlWithTask,
},
provide: {
- glFeatures: { workItems: true },
+ glFeatures: { workItemsCreateFromMarkdown: true },
},
});
return nextTick();
diff --git a/spec/frontend/issues/show/components/edit_actions_spec.js b/spec/frontend/issues/show/components/edit_actions_spec.js
index 79368023d76..d58bf1be812 100644
--- a/spec/frontend/issues/show/components/edit_actions_spec.js
+++ b/spec/frontend/issues/show/components/edit_actions_spec.js
@@ -75,7 +75,7 @@ describe('Edit Actions component', () => {
it('renders all buttons as enabled', () => {
const buttons = findEditButtons().wrappers;
buttons.forEach((button) => {
- expect(button.attributes('disabled')).toBeFalsy();
+ expect(button.attributes('disabled')).toBeUndefined();
});
});
diff --git a/spec/frontend/issues/show/components/incidents/create_timeline_events_form_spec.js b/spec/frontend/issues/show/components/incidents/create_timeline_events_form_spec.js
new file mode 100644
index 00000000000..3ab2bb3460b
--- /dev/null
+++ b/spec/frontend/issues/show/components/incidents/create_timeline_events_form_spec.js
@@ -0,0 +1,189 @@
+import VueApollo from 'vue-apollo';
+import Vue from 'vue';
+import { GlDatepicker } from '@gitlab/ui';
+import { __, s__ } from '~/locale';
+import { mountExtended } from 'helpers/vue_test_utils_helper';
+import waitForPromises from 'helpers/wait_for_promises';
+import CreateTimelineEvent from '~/issues/show/components/incidents/create_timeline_event.vue';
+import TimelineEventsForm from '~/issues/show/components/incidents/timeline_events_form.vue';
+import createTimelineEventMutation from '~/issues/show/components/incidents/graphql/queries/create_timeline_event.mutation.graphql';
+import getTimelineEvents from '~/issues/show/components/incidents/graphql/queries/get_timeline_events.query.graphql';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import { createAlert } from '~/flash';
+import { useFakeDate } from 'helpers/fake_date';
+import {
+ timelineEventsCreateEventResponse,
+ timelineEventsCreateEventError,
+ mockGetTimelineData,
+} from './mock_data';
+
+Vue.use(VueApollo);
+
+jest.mock('~/flash');
+
+const fakeDate = '2020-07-08T00:00:00.000Z';
+
+const mockInputData = {
+ incidentId: 'gid://gitlab/Issue/1',
+ note: 'test',
+ occurredAt: '2020-07-08T00:00:00.000Z',
+};
+
+describe('Create Timeline events', () => {
+ useFakeDate(fakeDate);
+ let wrapper;
+ let responseSpy;
+ let apolloProvider;
+
+ const findSubmitButton = () => wrapper.findByText(__('Save'));
+ const findSubmitAndAddButton = () =>
+ wrapper.findByText(s__('Incident|Save and add another event'));
+ const findCancelButton = () => wrapper.findByText(__('Cancel'));
+ const findDatePicker = () => wrapper.findComponent(GlDatepicker);
+ const findNoteInput = () => wrapper.findByTestId('input-note');
+ const setNoteInput = () => {
+ const textarea = findNoteInput().element;
+ textarea.value = mockInputData.note;
+ textarea.dispatchEvent(new Event('input'));
+ };
+ const findHourInput = () => wrapper.findByTestId('input-hours');
+ const findMinuteInput = () => wrapper.findByTestId('input-minutes');
+ const setDatetime = () => {
+ const inputDate = new Date(mockInputData.occurredAt);
+ findDatePicker().vm.$emit('input', inputDate);
+ findHourInput().vm.$emit('input', inputDate.getHours());
+ findMinuteInput().vm.$emit('input', inputDate.getMinutes());
+ };
+ const fillForm = () => {
+ setDatetime();
+ setNoteInput();
+ };
+
+ function createMockApolloProvider() {
+ const requestHandlers = [[createTimelineEventMutation, responseSpy]];
+ const mockApollo = createMockApollo(requestHandlers);
+
+ mockApollo.clients.defaultClient.cache.writeQuery({
+ query: getTimelineEvents,
+ data: mockGetTimelineData,
+ variables: {
+ fullPath: 'group/project',
+ incidentId: 'gid://gitlab/Issue/1',
+ },
+ });
+
+ return mockApollo;
+ }
+
+ const mountComponent = () => {
+ wrapper = mountExtended(CreateTimelineEvent, {
+ propsData: {
+ hasTimelineEvents: true,
+ },
+ provide: {
+ fullPath: 'group/project',
+ issuableId: '1',
+ },
+ apolloProvider,
+ });
+ };
+
+ beforeEach(() => {
+ responseSpy = jest.fn().mockResolvedValue(timelineEventsCreateEventResponse);
+ apolloProvider = createMockApolloProvider();
+ });
+
+ afterEach(() => {
+ createAlert.mockReset();
+ wrapper.destroy();
+ });
+
+ describe('createIncidentTimelineEvent', () => {
+ const closeFormEvent = { 'hide-new-timeline-events-form': [[]] };
+
+ const expectedData = {
+ input: mockInputData,
+ };
+
+ beforeEach(() => {
+ mountComponent();
+ fillForm();
+ });
+
+ describe('on submit', () => {
+ beforeEach(async () => {
+ findSubmitButton().trigger('click');
+ await waitForPromises();
+ });
+
+ it('should call the mutation with the right variables', () => {
+ expect(responseSpy).toHaveBeenCalledWith(expectedData);
+ });
+
+ it('should close the form on successful addition', () => {
+ expect(wrapper.emitted()).toEqual(closeFormEvent);
+ });
+ });
+
+ describe('on submit and add', () => {
+ beforeEach(async () => {
+ findSubmitAndAddButton().trigger('click');
+ await waitForPromises();
+ });
+
+ it('should keep the form open for save and add another', () => {
+ expect(wrapper.emitted()).toEqual({});
+ });
+ });
+
+ describe('on cancel', () => {
+ beforeEach(async () => {
+ findCancelButton().trigger('click');
+ await waitForPromises();
+ });
+
+ it('should close the form', () => {
+ expect(wrapper.emitted()).toEqual(closeFormEvent);
+ });
+ });
+ });
+
+ describe('error handling', () => {
+ it('should show an error when submission returns an error', async () => {
+ const expectedAlertArgs = {
+ message: `Error creating incident timeline event: ${timelineEventsCreateEventError.data.timelineEventCreate.errors[0]}`,
+ };
+ responseSpy.mockResolvedValueOnce(timelineEventsCreateEventError);
+ mountComponent();
+
+ findSubmitButton().trigger('click');
+ await waitForPromises();
+
+ expect(createAlert).toHaveBeenCalledWith(expectedAlertArgs);
+ });
+
+ it('should show an error when submission fails', async () => {
+ const expectedAlertArgs = {
+ captureError: true,
+ error: new Error(),
+ message: 'Something went wrong while creating the incident timeline event.',
+ };
+ responseSpy.mockRejectedValueOnce();
+ mountComponent();
+
+ findSubmitButton().trigger('click');
+ await waitForPromises();
+
+ expect(createAlert).toHaveBeenCalledWith(expectedAlertArgs);
+ });
+
+ it('should keep the form open on failed addition', async () => {
+ responseSpy.mockResolvedValueOnce(timelineEventsCreateEventError);
+ mountComponent();
+
+ await wrapper.findComponent(TimelineEventsForm).vm.$emit('save-event', mockInputData);
+ await waitForPromises();
+ expect(wrapper.emitted()).toEqual({});
+ });
+ });
+});
diff --git a/spec/frontend/issues/show/components/incidents/mock_data.js b/spec/frontend/issues/show/components/incidents/mock_data.js
index afc6099caf4..75c0a7350ae 100644
--- a/spec/frontend/issues/show/components/incidents/mock_data.js
+++ b/spec/frontend/issues/show/components/incidents/mock_data.js
@@ -72,10 +72,14 @@ export const timelineEventsQueryEmptyResponse = {
};
export const timelineEventsCreateEventResponse = {
- timelineEvent: {
- ...mockEvents[0],
+ data: {
+ timelineEventCreate: {
+ timelineEvent: {
+ ...mockEvents[0],
+ },
+ errors: [],
+ },
},
- errors: [],
};
export const timelineEventsCreateEventError = {
@@ -103,3 +107,21 @@ const timelineEventDeleteData = (errors = []) => {
export const timelineEventsDeleteEventResponse = timelineEventDeleteData();
export const timelineEventsDeleteEventError = timelineEventDeleteData(['Item does not exist']);
+
+export const mockGetTimelineData = {
+ project: {
+ id: 'gid://gitlab/Project/19',
+ incidentManagementTimelineEvents: {
+ nodes: [
+ {
+ id: 'gid://gitlab/IncidentManagement::TimelineEvent/8',
+ note: 'another one2',
+ noteHtml: '<p>another one2</p>',
+ action: 'comment',
+ occurredAt: '2022-07-01T12:47:00Z',
+ createdAt: '2022-07-20T12:47:40Z',
+ },
+ ],
+ },
+ },
+};
diff --git a/spec/frontend/issues/show/components/incidents/timeline_events_form_spec.js b/spec/frontend/issues/show/components/incidents/timeline_events_form_spec.js
index 620cdfc53b0..cd2cbb63246 100644
--- a/spec/frontend/issues/show/components/incidents/timeline_events_form_spec.js
+++ b/spec/frontend/issues/show/components/incidents/timeline_events_form_spec.js
@@ -3,49 +3,33 @@ import Vue, { nextTick } from 'vue';
import { GlDatepicker } from '@gitlab/ui';
import { shallowMountExtended, mountExtended } from 'helpers/vue_test_utils_helper';
import waitForPromises from 'helpers/wait_for_promises';
-import IncidentTimelineEventForm from '~/issues/show/components/incidents/timeline_events_form.vue';
-import createTimelineEventMutation from '~/issues/show/components/incidents/graphql/queries/create_timeline_event.mutation.graphql';
-import createMockApollo from 'helpers/mock_apollo_helper';
+import TimelineEventsForm from '~/issues/show/components/incidents/timeline_events_form.vue';
import { createAlert } from '~/flash';
import { useFakeDate } from 'helpers/fake_date';
-import { timelineEventsCreateEventResponse, timelineEventsCreateEventError } from './mock_data';
Vue.use(VueApollo);
jest.mock('~/flash');
-const addEventResponse = jest.fn().mockResolvedValue(timelineEventsCreateEventResponse);
-
-function createMockApolloProvider(response = addEventResponse) {
- const requestHandlers = [[createTimelineEventMutation, response]];
- return createMockApollo(requestHandlers);
-}
+const fakeDate = '2020-07-08T00:00:00.000Z';
describe('Timeline events form', () => {
// July 8 2020
- useFakeDate(2020, 6, 8);
+ useFakeDate(fakeDate);
let wrapper;
- const mountComponent = ({ mockApollo, mountMethod = shallowMountExtended, stubs }) => {
- wrapper = mountMethod(IncidentTimelineEventForm, {
+ const mountComponent = ({ mountMethod = shallowMountExtended }) => {
+ wrapper = mountMethod(TimelineEventsForm, {
propsData: {
hasTimelineEvents: true,
+ isEventProcessed: false,
},
- provide: {
- fullPath: 'group/project',
- issuableId: '1',
- },
- apolloProvider: mockApollo,
- stubs,
});
};
afterEach(() => {
- addEventResponse.mockReset();
createAlert.mockReset();
- if (wrapper) {
- wrapper.destroy();
- }
+ wrapper.destroy();
});
const findSubmitButton = () => wrapper.findByText('Save');
@@ -75,24 +59,28 @@ describe('Timeline events form', () => {
};
describe('form button behaviour', () => {
- const closeFormEvent = { 'hide-incident-timeline-event-form': [[]] };
beforeEach(() => {
- mountComponent({ mockApollo: createMockApolloProvider(), mountMethod: mountExtended });
+ mountComponent({ mountMethod: mountExtended });
});
- it('should close the form on submit', async () => {
+ it('should save event on submit', async () => {
await submitForm();
- expect(wrapper.emitted()).toEqual(closeFormEvent);
+
+ expect(wrapper.emitted()).toEqual({
+ 'save-event': [[{ note: '', occurredAt: fakeDate }, false]],
+ });
});
- it('should not close the form on "submit and add another"', async () => {
+ it('should save event on "submit and add another"', async () => {
await submitFormAndAddAnother();
- expect(wrapper.emitted()).toEqual({});
+ expect(wrapper.emitted()).toEqual({
+ 'save-event': [[{ note: '', occurredAt: fakeDate }, true]],
+ });
});
- it('should close the form on cancel', async () => {
+ it('should emit cancel on cancel', async () => {
await cancelForm();
- expect(wrapper.emitted()).toEqual(closeFormEvent);
+ expect(wrapper.emitted()).toEqual({ cancel: [[]] });
});
it('should clear the form', async () => {
@@ -111,71 +99,4 @@ describe('Timeline events form', () => {
expect(findMinuteInput().element.value).toBe('0');
});
});
-
- describe('addTimelineEventQuery', () => {
- const expectedData = {
- input: {
- incidentId: 'gid://gitlab/Issue/1',
- note: '',
- occurredAt: '2020-07-08T00:00:00.000Z',
- },
- };
-
- let mockApollo;
-
- beforeEach(() => {
- mockApollo = createMockApolloProvider();
- mountComponent({ mockApollo, mountMethod: mountExtended });
- });
-
- it('should call the mutation with the right variables', async () => {
- await submitForm();
-
- expect(addEventResponse).toHaveBeenCalledWith(expectedData);
- });
-
- it('should call the mutation with user selected variables', async () => {
- const expectedUserSelectedData = {
- input: {
- ...expectedData.input,
- occurredAt: '2021-08-12T05:45:00.000Z',
- },
- };
-
- setDatetime();
-
- await nextTick();
- await submitForm();
-
- expect(addEventResponse).toHaveBeenCalledWith(expectedUserSelectedData);
- });
- });
-
- describe('error handling', () => {
- it('should show an error when submission returns an error', async () => {
- const expectedAlertArgs = {
- message: 'Error creating incident timeline event: Create error',
- };
- addEventResponse.mockResolvedValueOnce(timelineEventsCreateEventError);
- mountComponent({ mockApollo: createMockApolloProvider(), mountMethod: mountExtended });
-
- await submitForm();
-
- expect(createAlert).toHaveBeenCalledWith(expectedAlertArgs);
- });
-
- it('should show an error when submission fails', async () => {
- const expectedAlertArgs = {
- captureError: true,
- error: new Error(),
- message: 'Something went wrong while creating the incident timeline event.',
- };
- addEventResponse.mockRejectedValueOnce();
- mountComponent({ mockApollo: createMockApolloProvider(), mountMethod: mountExtended });
-
- await submitForm();
-
- expect(createAlert).toHaveBeenCalledWith(expectedAlertArgs);
- });
- });
});
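With the Apollo plumbing moved out of the form component, these tests only assert what the form emits. `wrapper.emitted()` returns an object keyed by event name, where each entry is an array of calls and each call is the argument list passed to `$emit`, which is why the expected values are doubly nested. A short illustration using the same `fakeDate` constant defined above:

    // One `save-event` emission carrying two arguments (the payload and the
    // "save and add another" flag) is recorded as a single nested entry.
    const [firstCall] = wrapper.emitted('save-event');
    expect(firstCall).toEqual([{ note: '', occurredAt: fakeDate }, false]);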
diff --git a/spec/frontend/issues/show/components/incidents/timeline_events_list_item_spec.js b/spec/frontend/issues/show/components/incidents/timeline_events_item_spec.js
index e686f2eb4ec..90e55003ab3 100644
--- a/spec/frontend/issues/show/components/incidents/timeline_events_list_item_spec.js
+++ b/spec/frontend/issues/show/components/incidents/timeline_events_item_spec.js
@@ -2,7 +2,7 @@ import timezoneMock from 'timezone-mock';
import { GlIcon, GlDropdown } from '@gitlab/ui';
import { nextTick } from 'vue';
import { mountExtended } from 'helpers/vue_test_utils_helper';
-import IncidentTimelineEventListItem from '~/issues/show/components/incidents/timeline_events_list_item.vue';
+import IncidentTimelineEventItem from '~/issues/show/components/incidents/timeline_events_item.vue';
import { mockEvents } from './mock_data';
describe('IncidentTimelineEventList', () => {
@@ -10,7 +10,7 @@ describe('IncidentTimelineEventList', () => {
const mountComponent = ({ propsData, provide } = {}) => {
const { action, noteHtml, occurredAt } = mockEvents[0];
- wrapper = mountExtended(IncidentTimelineEventListItem, {
+ wrapper = mountExtended(IncidentTimelineEventItem, {
propsData: {
action,
noteHtml,
diff --git a/spec/frontend/issues/show/components/incidents/timeline_events_list_spec.js b/spec/frontend/issues/show/components/incidents/timeline_events_list_spec.js
index ae07237cf7d..4d2d53c990e 100644
--- a/spec/frontend/issues/show/components/incidents/timeline_events_list_spec.js
+++ b/spec/frontend/issues/show/components/incidents/timeline_events_list_spec.js
@@ -3,7 +3,7 @@ import VueApollo from 'vue-apollo';
import Vue from 'vue';
import { confirmAction } from '~/lib/utils/confirm_via_gl_modal/confirm_via_gl_modal';
import IncidentTimelineEventList from '~/issues/show/components/incidents/timeline_events_list.vue';
-import IncidentTimelineEventListItem from '~/issues/show/components/incidents/timeline_events_list_item.vue';
+import IncidentTimelineEventListItem from '~/issues/show/components/incidents/timeline_events_item.vue';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import waitForPromises from 'helpers/wait_for_promises';
import deleteTimelineEventMutation from '~/issues/show/components/incidents/graphql/queries/delete_timeline_event.mutation.graphql';
diff --git a/spec/frontend/issues/show/components/incidents/timeline_events_tab_spec.js b/spec/frontend/issues/show/components/incidents/timeline_events_tab_spec.js
index 2d87851a761..2cdb971395d 100644
--- a/spec/frontend/issues/show/components/incidents/timeline_events_tab_spec.js
+++ b/spec/frontend/issues/show/components/incidents/timeline_events_tab_spec.js
@@ -5,7 +5,7 @@ import { shallowMountExtended, mountExtended } from 'helpers/vue_test_utils_help
import waitForPromises from 'helpers/wait_for_promises';
import TimelineEventsTab from '~/issues/show/components/incidents/timeline_events_tab.vue';
import IncidentTimelineEventsList from '~/issues/show/components/incidents/timeline_events_list.vue';
-import IncidentTimelineEventForm from '~/issues/show/components/incidents/timeline_events_form.vue';
+import CreateTimelineEvent from '~/issues/show/components/incidents/create_timeline_event.vue';
import timelineEventsQuery from '~/issues/show/components/incidents/graphql/queries/get_timeline_events.query.graphql';
import createMockApollo from 'helpers/mock_apollo_helper';
import { createAlert } from '~/flash';
@@ -53,7 +53,7 @@ describe('TimelineEventsTab', () => {
const findLoadingSpinner = () => wrapper.findComponent(GlLoadingIcon);
const findEmptyState = () => wrapper.findComponent(GlEmptyState);
const findTimelineEventsList = () => wrapper.findComponent(IncidentTimelineEventsList);
- const findTimelineEventForm = () => wrapper.findComponent(IncidentTimelineEventForm);
+ const findCreateTimelineEvent = () => wrapper.findComponent(CreateTimelineEvent);
const findAddEventButton = () => wrapper.findByText(timelineTabI18n.addEventButton);
describe('Timeline events tab', () => {
@@ -143,18 +143,18 @@ describe('TimelineEventsTab', () => {
});
it('should not show a form by default', () => {
- expect(findTimelineEventForm().isVisible()).toBe(false);
+ expect(findCreateTimelineEvent().isVisible()).toBe(false);
});
it('should show a form when button is clicked', async () => {
await findAddEventButton().trigger('click');
- expect(findTimelineEventForm().isVisible()).toBe(true);
+ expect(findCreateTimelineEvent().isVisible()).toBe(true);
});
it('should clear the form when button is clicked', async () => {
const mockClear = jest.fn();
- wrapper.vm.$refs.eventForm.clear = mockClear;
+ wrapper.vm.$refs.createEventForm.clearForm = mockClear;
await findAddEventButton().trigger('click');
@@ -165,9 +165,9 @@ describe('TimelineEventsTab', () => {
// open the form
await findAddEventButton().trigger('click');
- await findTimelineEventForm().vm.$emit('hide-incident-timeline-event-form');
+ await findCreateTimelineEvent().vm.$emit('hide-new-timeline-events-form');
- expect(findTimelineEventForm().isVisible()).toBe(false);
+ expect(findCreateTimelineEvent().isVisible()).toBe(false);
});
});
});
diff --git a/spec/frontend/issues/show/components/incidents/utils_spec.js b/spec/frontend/issues/show/components/incidents/utils_spec.js
index 0da0114c654..d3a86680f14 100644
--- a/spec/frontend/issues/show/components/incidents/utils_spec.js
+++ b/spec/frontend/issues/show/components/incidents/utils_spec.js
@@ -24,7 +24,7 @@ describe('incident utils', () => {
describe('get event icon', () => {
it('should display a matching event icon name', () => {
- ['comment', 'issues', 'status'].forEach((name) => {
+ ['comment', 'issues', 'label', 'status'].forEach((name) => {
expect(getEventIcon(name)).toBe(name);
});
});
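Adding `'label'` to the list implies `getEventIcon` passes known timeline actions straight through as icon names. A hypothetical sketch consistent with this spec (the fallback value is an assumption; the real helper in `~/issues/show/components/incidents/utils` may differ):

    // Hypothetical: known actions map to icons of the same name, and anything
    // unrecognised falls back to a default icon.
    const EVENT_ICON_NAMES = ['comment', 'issues', 'label', 'status'];

    export const getEventIcon = (actionName) =>
      EVENT_ICON_NAMES.includes(actionName) ? actionName : 'comment';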
diff --git a/spec/frontend/jira_connect/branches/components/project_dropdown_spec.js b/spec/frontend/jira_connect/branches/components/project_dropdown_spec.js
index 136a5967ee4..b0218a9df12 100644
--- a/spec/frontend/jira_connect/branches/components/project_dropdown_spec.js
+++ b/spec/frontend/jira_connect/branches/components/project_dropdown_spec.js
@@ -148,7 +148,7 @@ describe('ProjectDropdown', () => {
});
it('emits `error` event', () => {
- expect(wrapper.emitted('error')).toBeTruthy();
+ expect(wrapper.emitted('error')).toHaveLength(1);
});
});
diff --git a/spec/frontend/jira_connect/subscriptions/components/add_namespace_button_spec.js b/spec/frontend/jira_connect/subscriptions/components/add_namespace_button_spec.js
index 5ec1b7b7932..9f92ad2adc1 100644
--- a/spec/frontend/jira_connect/subscriptions/components/add_namespace_button_spec.js
+++ b/spec/frontend/jira_connect/subscriptions/components/add_namespace_button_spec.js
@@ -38,7 +38,6 @@ describe('AddNamespaceButton', () => {
it('button is bound to the modal', () => {
const { value } = getBinding(findButton().element, 'gl-modal');
- expect(value).toBeTruthy();
expect(value).toBe(ADD_NAMESPACE_MODAL_ID);
});
});
diff --git a/spec/frontend/jira_connect/subscriptions/components/sign_in_oauth_button_spec.js b/spec/frontend/jira_connect/subscriptions/components/sign_in_oauth_button_spec.js
index 8f79c74368f..ed0abaaf576 100644
--- a/spec/frontend/jira_connect/subscriptions/components/sign_in_oauth_button_spec.js
+++ b/spec/frontend/jira_connect/subscriptions/components/sign_in_oauth_button_spec.js
@@ -128,7 +128,7 @@ describe('SignInOauthButton', () => {
});
it('does not emit `sign-in` event', () => {
- expect(wrapper.emitted('sign-in')).toBeFalsy();
+ expect(wrapper.emitted('sign-in')).toBeUndefined();
});
it('sets `loading` prop of button to `false`', () => {
@@ -179,7 +179,7 @@ describe('SignInOauthButton', () => {
});
it('emits `sign-in` event with user data', () => {
- expect(wrapper.emitted('sign-in')[0]).toBeTruthy();
+ expect(wrapper.emitted('sign-in')).toHaveLength(1);
});
});
@@ -200,7 +200,7 @@ describe('SignInOauthButton', () => {
});
it('does not emit `sign-in` event', () => {
- expect(wrapper.emitted('sign-in')).toBeFalsy();
+ expect(wrapper.emitted('sign-in')).toBeUndefined();
});
it('sets `loading` prop of button to `false`', () => {
diff --git a/spec/frontend/jira_connect/subscriptions/pages/sign_in/sign_in_page_spec.js b/spec/frontend/jira_connect/subscriptions/pages/sign_in/sign_in_page_spec.js
index 65b08fba592..c12a45b2f41 100644
--- a/spec/frontend/jira_connect/subscriptions/pages/sign_in/sign_in_page_spec.js
+++ b/spec/frontend/jira_connect/subscriptions/pages/sign_in/sign_in_page_spec.js
@@ -68,7 +68,7 @@ describe('SignInPage', () => {
describe('when error event is emitted', () => {
it('emits another error event', () => {
findSignInGitlabCom().vm.$emit('error');
- expect(wrapper.emitted('error')[0]).toBeTruthy();
+ expect(wrapper.emitted('error')).toHaveLength(1);
});
});
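The `toBeTruthy`/`toBeFalsy` replacements in these jira_connect specs lean on how vue-test-utils records emissions: `wrapper.emitted('x')` is `undefined` until the event has fired and an array of calls afterwards, so `toBeUndefined()` and `toHaveLength(1)` assert the exact state rather than mere truthiness. For example:

    // Before any emission the record is undefined, not an empty array.
    expect(wrapper.emitted('sign-in')).toBeUndefined();

    // After the component emits once with a user payload,
    // wrapper.emitted('sign-in') is [[user]].
    expect(wrapper.emitted('sign-in')).toHaveLength(1);
    expect(wrapper.emitted('sign-in')[0]).toEqual([user]);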
diff --git a/spec/frontend/jobs/components/job_log_controllers_spec.js b/spec/frontend/jobs/components/job_log_controllers_spec.js
index cc97d111c06..aa85253a177 100644
--- a/spec/frontend/jobs/components/job_log_controllers_spec.js
+++ b/spec/frontend/jobs/components/job_log_controllers_spec.js
@@ -1,8 +1,9 @@
import { GlSearchBoxByClick } from '@gitlab/ui';
import { mount } from '@vue/test-utils';
-import { nextTick } from 'vue';
import JobLogControllers from '~/jobs/components/job_log_controllers.vue';
import HelpPopover from '~/vue_shared/components/help_popover.vue';
+import { backoffMockImplementation } from 'helpers/backoff_helper';
+import * as commonUtils from '~/lib/utils/common_utils';
import { mockJobLog } from '../mock_data';
const mockToastShow = jest.fn();
@@ -10,10 +11,15 @@ const mockToastShow = jest.fn();
describe('Job log controllers', () => {
let wrapper;
+ beforeEach(() => {
+ jest.spyOn(commonUtils, 'backOff').mockImplementation(backoffMockImplementation);
+ });
+
afterEach(() => {
if (wrapper?.destroy) {
wrapper.destroy();
}
+ commonUtils.backOff.mockReset();
});
const defaultProps = {
@@ -24,10 +30,11 @@ describe('Job log controllers', () => {
isScrollBottomDisabled: false,
isScrollingDown: true,
isJobLogSizeVisible: true,
+ isComplete: true,
jobLog: mockJobLog,
};
- const createWrapper = (props, jobLogSearch = false) => {
+ const createWrapper = (props, { jobLogJumpToFailures = false } = {}) => {
wrapper = mount(JobLogControllers, {
propsData: {
...defaultProps,
@@ -35,7 +42,7 @@ describe('Job log controllers', () => {
},
provide: {
glFeatures: {
- jobLogSearch,
+ jobLogJumpToFailures,
},
},
data() {
@@ -58,6 +65,7 @@ describe('Job log controllers', () => {
const findScrollBottom = () => wrapper.find('[data-testid="job-controller-scroll-bottom"]');
const findJobLogSearch = () => wrapper.findComponent(GlSearchBoxByClick);
const findSearchHelp = () => wrapper.findComponent(HelpPopover);
+ const findScrollFailure = () => wrapper.find('[data-testid="job-controller-scroll-to-failure"]');
describe('Truncate information', () => {
describe('with isJobLogSizeVisible', () => {
@@ -109,9 +117,7 @@ describe('Job log controllers', () => {
});
it('emits scrollJobLogTop event on click', async () => {
- findScrollTop().trigger('click');
-
- await nextTick();
+ await findScrollTop().trigger('click');
expect(wrapper.emitted().scrollJobLogTop).toHaveLength(1);
});
@@ -131,9 +137,7 @@ describe('Job log controllers', () => {
});
it('does not emit scrollJobLogTop event on click', async () => {
- findScrollTop().trigger('click');
-
- await nextTick();
+ await findScrollTop().trigger('click');
expect(wrapper.emitted().scrollJobLogTop).toBeUndefined();
});
@@ -147,9 +151,7 @@ describe('Job log controllers', () => {
});
it('emits scrollJobLogBottom event on click', async () => {
- findScrollBottom().trigger('click');
-
- await nextTick();
+ await findScrollBottom().trigger('click');
expect(wrapper.emitted().scrollJobLogBottom).toHaveLength(1);
});
@@ -169,9 +171,7 @@ describe('Job log controllers', () => {
});
it('does not emit scrollJobLogBottom event on click', async () => {
- findScrollBottom().trigger('click');
-
- await nextTick();
+ await findScrollBottom().trigger('click');
expect(wrapper.emitted().scrollJobLogBottom).toBeUndefined();
});
@@ -201,41 +201,115 @@ describe('Job log controllers', () => {
});
});
});
- });
- describe('Job log search', () => {
- describe('with feature flag off', () => {
- it('does not display job log search', () => {
- createWrapper();
+ describe('scroll to failure button', () => {
+ describe('with feature flag disabled', () => {
+ it('does not display button', () => {
+ createWrapper();
- expect(findJobLogSearch().exists()).toBe(false);
- expect(findSearchHelp().exists()).toBe(false);
+ expect(findScrollFailure().exists()).toBe(false);
+ });
});
- });
- describe('with feature flag on', () => {
- beforeEach(() => {
- createWrapper({}, { jobLogSearch: true });
- });
+ describe('with red text failures on the page', () => {
+ let firstFailure;
+ let secondFailure;
+
+ beforeEach(() => {
+ jest.spyOn(document, 'querySelectorAll').mockReturnValueOnce(['mock-element']);
+
+ createWrapper({}, { jobLogJumpToFailures: true });
+
+ firstFailure = document.createElement('div');
+ firstFailure.className = 'term-fg-l-red';
+ document.body.appendChild(firstFailure);
+
+ secondFailure = document.createElement('div');
+ secondFailure.className = 'term-fg-l-red';
+ document.body.appendChild(secondFailure);
+ });
+
+ afterEach(() => {
+ if (firstFailure) {
+ firstFailure.remove();
+ firstFailure = null;
+ }
+
+ if (secondFailure) {
+ secondFailure.remove();
+ secondFailure = null;
+ }
+ });
+
+ it('is enabled', () => {
+ expect(findScrollFailure().props('disabled')).toBe(false);
+ });
+
+ it('scrolls to each failure', async () => {
+ jest.spyOn(firstFailure, 'scrollIntoView');
- it('displays job log search', () => {
- expect(findJobLogSearch().exists()).toBe(true);
- expect(findSearchHelp().exists()).toBe(true);
+ await findScrollFailure().trigger('click');
+
+ expect(firstFailure.scrollIntoView).toHaveBeenCalled();
+
+ await findScrollFailure().trigger('click');
+
+ expect(secondFailure.scrollIntoView).toHaveBeenCalled();
+
+ await findScrollFailure().trigger('click');
+
+ expect(firstFailure.scrollIntoView).toHaveBeenCalled();
+ });
});
- it('emits search results', () => {
- const expectedSearchResults = [[[mockJobLog[6].lines[1], mockJobLog[6].lines[2]]]];
+ describe('with no red text failures on the page', () => {
+ beforeEach(() => {
+ jest.spyOn(document, 'querySelectorAll').mockReturnValueOnce([]);
- findJobLogSearch().vm.$emit('submit');
+ createWrapper({}, { jobLogJumpToFailures: true });
+ });
- expect(wrapper.emitted('searchResults')).toEqual(expectedSearchResults);
+ it('is disabled', () => {
+ expect(findScrollFailure().props('disabled')).toBe(true);
+ });
});
- it('clears search results', () => {
- findJobLogSearch().vm.$emit('clear');
+ describe('when the job log is not complete', () => {
+ beforeEach(() => {
+ jest.spyOn(document, 'querySelectorAll').mockReturnValueOnce(['mock-element']);
+
+ createWrapper({ isComplete: false }, { jobLogJumpToFailures: true });
+ });
- expect(wrapper.emitted('searchResults')).toEqual([[[]]]);
+ it('is enabled', () => {
+ expect(findScrollFailure().props('disabled')).toBe(false);
+ });
});
});
});
+
+ describe('Job log search', () => {
+ beforeEach(() => {
+ createWrapper();
+ });
+
+ it('displays job log search', () => {
+ expect(findJobLogSearch().exists()).toBe(true);
+ expect(findSearchHelp().exists()).toBe(true);
+ });
+
+ it('emits search results', () => {
+ const expectedSearchResults = [[[mockJobLog[6].lines[1], mockJobLog[6].lines[2]]]];
+
+ findJobLogSearch().vm.$emit('submit');
+
+ expect(wrapper.emitted('searchResults')).toEqual(expectedSearchResults);
+ });
+
+ it('clears search results', () => {
+ findJobLogSearch().vm.$emit('clear');
+
+ expect(wrapper.emitted('searchResults')).toEqual([[[]]]);
+ });
+ });
});
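The job log spec now stubs `commonUtils.backOff` with `backoffMockImplementation` so any polling done by the controls resolves immediately under Jest instead of waiting on real timers. The helper is imported from `helpers/backoff_helper`; its exact body is not shown in this patch, but a mock of this kind typically looks something like the following sketch (hypothetical):

    // Run the polled callback once, synchronously, and settle the promise with
    // whatever the caller passes to stop(); retries via next() are ignored.
    export const backoffMockImplementation = (callback) =>
      new Promise((resolve, reject) => {
        const next = () => {};
        const stop = (value) => resolve(value);
        try {
          callback(next, stop);
        } catch (error) {
          reject(error);
        }
      });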
diff --git a/spec/frontend/jobs/components/sidebar_detail_row_spec.js b/spec/frontend/jobs/components/sidebar_detail_row_spec.js
index 43f2e022dd8..8d2680608ab 100644
--- a/spec/frontend/jobs/components/sidebar_detail_row_spec.js
+++ b/spec/frontend/jobs/components/sidebar_detail_row_spec.js
@@ -7,7 +7,7 @@ describe('Sidebar detail row', () => {
const title = 'this is the title';
const value = 'this is the value';
- const helpUrl = '/help/ci/runners/index.html';
+ const helpUrl = 'https://docs.gitlab.com/runner/register/index.html';
const findHelpLink = () => wrapper.findComponent(GlLink);
diff --git a/spec/frontend/labels/labels_select_spec.js b/spec/frontend/labels/labels_select_spec.js
index f6e280564cc..63f7c725bc7 100644
--- a/spec/frontend/labels/labels_select_spec.js
+++ b/spec/frontend/labels/labels_select_spec.js
@@ -101,6 +101,12 @@ describe('LabelsSelect', () => {
expect($labelEl.find('a').attr('data-html')).toBe('true');
});
+ it('generated label item template has correct title for tooltip', () => {
+ expect($labelEl.find('a').attr('title')).toBe(
+ "<span class='font-weight-bold scoped-label-tooltip-title'>Scoped label</span><br>Foobar",
+ );
+ });
+
it('generated label item template has correct label styles and classes', () => {
expect($labelEl.find('span.gl-label-text').attr('style')).toBe(
`background-color: ${label.color};`,
diff --git a/spec/frontend/lib/dompurify_spec.js b/spec/frontend/lib/dompurify_spec.js
index b585c69e911..29b927ef628 100644
--- a/spec/frontend/lib/dompurify_spec.js
+++ b/spec/frontend/lib/dompurify_spec.js
@@ -173,4 +173,50 @@ describe('~/lib/dompurify', () => {
expect(sanitize(html)).toBe(`<a>internal link</a>`);
});
});
+
+ describe('links with target attribute', () => {
+ const getSanitizedNode = (html) => {
+ return document.createRange().createContextualFragment(sanitize(html)).firstElementChild;
+ };
+
+ it('adds secure context', () => {
+ const html = `<a href="https://example.com" target="_blank">link</a>`;
+ const el = getSanitizedNode(html);
+
+ expect(el.getAttribute('target')).toBe('_blank');
+ expect(el.getAttribute('rel')).toBe('noopener noreferrer');
+ });
+
+ it('adds secure context and merge existing `rel` values', () => {
+ const html = `<a href="https://example.com" target="_blank" rel="help External">link</a>`;
+ const el = getSanitizedNode(html);
+
+ expect(el.getAttribute('target')).toBe('_blank');
+ expect(el.getAttribute('rel')).toBe('help external noopener noreferrer');
+ });
+
+ it('does not duplicate noopener/noreferrer `rel` values', () => {
+ const html = `<a href="https://example.com" target="_blank" rel="noreferrer noopener">link</a>`;
+ const el = getSanitizedNode(html);
+
+ expect(el.getAttribute('target')).toBe('_blank');
+ expect(el.getAttribute('rel')).toBe('noreferrer noopener');
+ });
+
+ it('does not update `rel` values when target is not `_blank`', () => {
+ const html = `<a href="https://example.com" target="_self" rel="help">internal</a>`;
+ const el = getSanitizedNode(html);
+
+ expect(el.getAttribute('target')).toBe('_self');
+ expect(el.getAttribute('rel')).toBe('help');
+ });
+
+ it('does not update `rel` values when target attribute is not present', () => {
+ const html = `<a href="https://example.com">link</a>`;
+ const el = getSanitizedNode(html);
+
+ expect(el.hasAttribute('target')).toBe(false);
+ expect(el.hasAttribute('rel')).toBe(false);
+ });
+ });
});
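These new cases pin down the wrapper's treatment of `target="_blank"` links: any sanitised anchor that opens a new browsing context must end up with `noopener noreferrer` merged into its `rel`. DOMPurify supports this kind of post-processing through its hook API; the following is a sketch of the technique that satisfies the cases above, not necessarily the code in `~/lib/dompurify`:

    import DOMPurify from 'dompurify';

    // After sanitisation, force a secure rel on anything targeting a new
    // browsing context, merging with and de-duplicating existing values.
    DOMPurify.addHook('afterSanitizeAttributes', (node) => {
      if (node.tagName === 'A' && node.getAttribute('target') === '_blank') {
        const rel = new Set(
          (node.getAttribute('rel') || '').toLowerCase().split(/\s+/).filter(Boolean),
        );
        rel.add('noopener');
        rel.add('noreferrer');
        node.setAttribute('rel', [...rel].join(' '));
      }
    });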
diff --git a/spec/frontend/lib/gfm/index_spec.js b/spec/frontend/lib/gfm/index_spec.js
index b722315d63a..f53f809b799 100644
--- a/spec/frontend/lib/gfm/index_spec.js
+++ b/spec/frontend/lib/gfm/index_spec.js
@@ -96,26 +96,164 @@ describe('gfm', () => {
);
});
});
- });
- describe('when skipping the rendering of code blocks', () => {
- it('transforms code nodes into codeblock html tags', async () => {
- const result = await markdownToAST(
- `
+ describe('when skipping the rendering of code blocks', () => {
+ it('transforms code nodes into codeblock html tags', async () => {
+ const result = await markdownToAST(
+ `
\`\`\`javascript
console.log('Hola');
\`\`\`\
`,
- ['code'],
- );
+ ['code'],
+ );
+
+ expectInRoot(
+ result,
+ expect.objectContaining({
+ tagName: 'codeblock',
+ properties: {
+ language: 'javascript',
+ },
+ }),
+ );
+ });
+ });
+
+ describe('when skipping the rendering of reference definitions', () => {
+ it('transforms definition nodes into referencedefinition html tags', async () => {
+ const result = await markdownToAST(
+ `
+[gitlab][gitlab]
+
+[gitlab]: https://gitlab.com "GitLab"
+ `,
+ ['definition'],
+ );
+
+ expectInRoot(
+ result,
+ expect.objectContaining({
+ type: 'element',
+ tagName: 'referencedefinition',
+ properties: {
+ identifier: 'gitlab',
+ title: 'GitLab',
+ url: 'https://gitlab.com',
+ },
+ children: [
+ {
+ type: 'text',
+ value: '[gitlab]: https://gitlab.com "GitLab"',
+ },
+ ],
+ }),
+ );
+ });
+ });
+
+ describe('when skipping the rendering of link and image references', () => {
+ it('transforms linkReference and imageReference nodes into html tags', async () => {
+ const result = await markdownToAST(
+ `
+[gitlab][gitlab] and ![GitLab Logo][gitlab-logo]
+
+[gitlab]: https://gitlab.com "GitLab"
+[gitlab-logo]: https://gitlab.com/gitlab-logo.png "GitLab Logo"
+ `,
+ ['linkReference', 'imageReference'],
+ );
+
+ expectInRoot(
+ result,
+ expect.objectContaining({
+ tagName: 'p',
+ children: expect.arrayContaining([
+ expect.objectContaining({
+ type: 'element',
+ tagName: 'a',
+ properties: expect.objectContaining({
+ href: 'https://gitlab.com',
+ isReference: 'true',
+ identifier: 'gitlab',
+ title: 'GitLab',
+ }),
+ }),
+ expect.objectContaining({
+ type: 'element',
+ tagName: 'img',
+ properties: expect.objectContaining({
+ src: 'https://gitlab.com/gitlab-logo.png',
+ isReference: 'true',
+ identifier: 'gitlab-logo',
+ title: 'GitLab Logo',
+ alt: 'GitLab Logo',
+ }),
+ }),
+ ]),
+ }),
+ );
+ });
+
+ it('normalizes the urls extracted from the reference definitions', async () => {
+ const result = await markdownToAST(
+ `
+[gitlab][gitlab] and ![GitLab Logo][gitlab]
+
+[gitlab]: /url\\bar*baz
+ `,
+ ['linkReference', 'imageReference'],
+ );
+
+ expectInRoot(
+ result,
+ expect.objectContaining({
+ tagName: 'p',
+ children: expect.arrayContaining([
+ expect.objectContaining({
+ type: 'element',
+ tagName: 'a',
+ properties: expect.objectContaining({
+ href: '/url%5Cbar*baz',
+ }),
+ }),
+ expect.objectContaining({
+ type: 'element',
+ tagName: 'img',
+ properties: expect.objectContaining({
+ src: '/url%5Cbar*baz',
+ }),
+ }),
+ ]),
+ }),
+ );
+ });
+ });
+ });
+
+ describe('when skipping the rendering of frontmatter types', () => {
+ it.each`
+ type | input
+ ${'yaml'} | ${'---\ntitle: page\n---'}
+ ${'toml'} | ${'+++\ntitle: page\n+++'}
+ ${'json'} | ${';;;\ntitle: page\n;;;'}
+ `('transforms $type nodes into frontmatter html tags', async ({ input, type }) => {
+ const result = await markdownToAST(input, [type]);
expectInRoot(
result,
expect.objectContaining({
- tagName: 'codeblock',
+ type: 'element',
+ tagName: 'frontmatter',
properties: {
- language: 'javascript',
+ language: type,
},
+ children: [
+ {
+ type: 'text',
+ value: 'title: page',
+ },
+ ],
}),
);
});
diff --git a/spec/frontend/lib/utils/common_utils_spec.js b/spec/frontend/lib/utils/common_utils_spec.js
index 7cf101a5e59..a2ace8857ed 100644
--- a/spec/frontend/lib/utils/common_utils_spec.js
+++ b/spec/frontend/lib/utils/common_utils_spec.js
@@ -292,16 +292,11 @@ describe('common_utils', () => {
const spy = jest.fn();
const debouncedSpy = commonUtils.debounceByAnimationFrame(spy);
- return new Promise((resolve) => {
- window.requestAnimationFrame(() => {
- debouncedSpy();
- debouncedSpy();
- window.requestAnimationFrame(() => {
- expect(spy).toHaveBeenCalledTimes(1);
- resolve();
- });
- });
- });
+ debouncedSpy();
+ debouncedSpy();
+ jest.runOnlyPendingTimers();
+
+ expect(spy).toHaveBeenCalledTimes(1);
});
});
@@ -633,7 +628,7 @@ describe('common_utils', () => {
it('returns an empty object if `conversionFunction` parameter is not a function', () => {
const result = commonUtils.convertObjectProps(null, mockObjects.convertObjectProps.obj);
- expect(isEmptyObject(result)).toBeTruthy();
+ expect(isEmptyObject(result)).toBe(true);
});
});
@@ -650,9 +645,9 @@ describe('common_utils', () => {
: commonUtils[functionName];
it('returns an empty object if `obj` parameter is null, undefined or an empty object', () => {
- expect(isEmptyObject(testFunction(null))).toBeTruthy();
- expect(isEmptyObject(testFunction())).toBeTruthy();
- expect(isEmptyObject(testFunction({}))).toBeTruthy();
+ expect(isEmptyObject(testFunction(null))).toBe(true);
+ expect(isEmptyObject(testFunction())).toBe(true);
+ expect(isEmptyObject(testFunction({}))).toBe(true);
});
it('converts object properties', () => {
diff --git a/spec/frontend/lib/utils/confirm_via_gl_modal/confirm_modal_spec.js b/spec/frontend/lib/utils/confirm_via_gl_modal/confirm_modal_spec.js
index d6131b1a1d7..313e028d861 100644
--- a/spec/frontend/lib/utils/confirm_via_gl_modal/confirm_modal_spec.js
+++ b/spec/frontend/lib/utils/confirm_via_gl_modal/confirm_modal_spec.js
@@ -42,12 +42,12 @@ describe('Confirm Modal', () => {
it('should emit `confirmed` event on `primary` modal event', () => {
findGlModal().vm.$emit('primary');
- expect(wrapper.emitted('confirmed')).toBeTruthy();
+ expect(wrapper.emitted('confirmed')).toHaveLength(1);
});
it('should emit closed` event on `hidden` modal event', () => {
modal.vm.$emit('hidden');
- expect(wrapper.emitted('closed')).toBeTruthy();
+ expect(wrapper.emitted('closed')).toHaveLength(1);
});
});
diff --git a/spec/frontend/lib/utils/rails_ujs_spec.js b/spec/frontend/lib/utils/rails_ujs_spec.js
index c10301523c9..da9cc5c6f3c 100644
--- a/spec/frontend/lib/utils/rails_ujs_spec.js
+++ b/spec/frontend/lib/utils/rails_ujs_spec.js
@@ -18,14 +18,12 @@ function mockXHRResponse({ responseText, responseContentType } = {}) {
.mockReturnValue(responseContentType);
jest.spyOn(global.XMLHttpRequest.prototype, 'send').mockImplementation(function send() {
- requestAnimationFrame(() => {
- Object.defineProperties(this, {
- readyState: { value: XMLHttpRequest.DONE },
- status: { value: 200 },
- response: { value: responseText },
- });
- this.onreadystatechange();
+ Object.defineProperties(this, {
+ readyState: { value: XMLHttpRequest.DONE },
+ status: { value: 200 },
+ response: { value: responseText },
});
+ this.onreadystatechange();
});
}
diff --git a/spec/frontend/lib/utils/recurrence_spec.js b/spec/frontend/lib/utils/recurrence_spec.js
index fc22529dffc..8bf3ea4e25a 100644
--- a/spec/frontend/lib/utils/recurrence_spec.js
+++ b/spec/frontend/lib/utils/recurrence_spec.js
@@ -211,9 +211,10 @@ describe('recurrence', () => {
describe('eject', () => {
it('removes the handler assigned to the particular count slot', () => {
- recurInstance.handle(1, jest.fn());
+ const func = jest.fn();
+ recurInstance.handle(1, func);
- expect(recurInstance.handlers[1]).toBeTruthy();
+ expect(recurInstance.handlers[1]).toStrictEqual(func);
recurInstance.eject(1);
diff --git a/spec/frontend/lib/utils/sticky_spec.js b/spec/frontend/lib/utils/sticky_spec.js
index 01e8fe777af..ec9e746c838 100644
--- a/spec/frontend/lib/utils/sticky_spec.js
+++ b/spec/frontend/lib/utils/sticky_spec.js
@@ -34,13 +34,13 @@ describe('sticky', () => {
isSticky(el, 0, el.offsetTop);
isSticky(el, 0, el.offsetTop);
- expect(el.classList.contains('is-stuck')).toBeTruthy();
+ expect(el.classList.contains('is-stuck')).toBe(true);
});
it('adds is-stuck class', () => {
isSticky(el, 0, el.offsetTop);
- expect(el.classList.contains('is-stuck')).toBeTruthy();
+ expect(el.classList.contains('is-stuck')).toBe(true);
});
it('inserts placeholder element', () => {
@@ -64,7 +64,7 @@ describe('sticky', () => {
it('does not add is-stuck class', () => {
isSticky(el, 0, 0);
- expect(el.classList.contains('is-stuck')).toBeFalsy();
+ expect(el.classList.contains('is-stuck')).toBe(false);
});
it('removes placeholder', () => {
diff --git a/spec/frontend/lib/utils/text_markdown_spec.js b/spec/frontend/lib/utils/text_markdown_spec.js
index d1bca3c73b6..733d89fe08c 100644
--- a/spec/frontend/lib/utils/text_markdown_spec.js
+++ b/spec/frontend/lib/utils/text_markdown_spec.js
@@ -193,6 +193,7 @@ describe('init markdown', () => {
${'- [ ] item'} | ${'- [ ] item\n- [ ] '}
${'- [x] item'} | ${'- [x] item\n- [ ] '}
${'- [X] item'} | ${'- [X] item\n- [ ] '}
+ ${'- [~] item'} | ${'- [~] item\n- [ ] '}
${'- [ ] nbsp (U+00A0)'} | ${'- [ ] nbsp (U+00A0)\n- [ ] '}
${'- item\n - second'} | ${'- item\n - second\n - '}
${'- - -'} | ${'- - -'}
@@ -205,6 +206,7 @@ describe('init markdown', () => {
${'1. [ ] item'} | ${'1. [ ] item\n2. [ ] '}
${'1. [x] item'} | ${'1. [x] item\n2. [ ] '}
${'1. [X] item'} | ${'1. [X] item\n2. [ ] '}
+ ${'1. [~] item'} | ${'1. [~] item\n2. [ ] '}
${'108. item'} | ${'108. item\n109. '}
${'108. item\n - second'} | ${'108. item\n - second\n - '}
${'108. item\n 1. second'} | ${'108. item\n 1. second\n 2. '}
@@ -228,11 +230,13 @@ describe('init markdown', () => {
${'- [ ] item\n- [ ] '} | ${'- [ ] item\n'}
${'- [x] item\n- [x] '} | ${'- [x] item\n'}
${'- [X] item\n- [X] '} | ${'- [X] item\n'}
+ ${'- [~] item\n- [~] '} | ${'- [~] item\n'}
${'- item\n - second\n - '} | ${'- item\n - second\n'}
${'1. item\n2. '} | ${'1. item\n'}
${'1. [ ] item\n2. [ ] '} | ${'1. [ ] item\n'}
${'1. [x] item\n2. [x] '} | ${'1. [x] item\n'}
${'1. [X] item\n2. [X] '} | ${'1. [X] item\n'}
+ ${'1. [~] item\n2. [~] '} | ${'1. [~] item\n'}
${'108. item\n109. '} | ${'108. item\n'}
${'108. item\n - second\n - '} | ${'108. item\n - second\n'}
${'108. item\n 1. second\n 1. '} | ${'108. item\n 1. second\n'}
@@ -301,6 +305,129 @@ describe('init markdown', () => {
});
});
+ describe('shifting selected lines left or right', () => {
+ const indentEvent = new KeyboardEvent('keydown', { key: ']', metaKey: true });
+ const outdentEvent = new KeyboardEvent('keydown', { key: '[', metaKey: true });
+
+ beforeEach(() => {
+ textArea.addEventListener('keydown', keypressNoteText);
+ textArea.addEventListener('compositionstart', compositionStartNoteText);
+ textArea.addEventListener('compositionend', compositionEndNoteText);
+ });
+
+ it.each`
+ selectionStart | selectionEnd | expected | expectedSelectionStart | expectedSelectionEnd
+ ${0} | ${0} | ${' 012\n456\n89'} | ${2} | ${2}
+ ${5} | ${5} | ${'012\n 456\n89'} | ${7} | ${7}
+ ${10} | ${10} | ${'012\n456\n 89'} | ${12} | ${12}
+ ${0} | ${2} | ${' 012\n456\n89'} | ${0} | ${4}
+ ${1} | ${2} | ${' 012\n456\n89'} | ${3} | ${4}
+ ${5} | ${7} | ${'012\n 456\n89'} | ${7} | ${9}
+ ${0} | ${7} | ${' 012\n 456\n89'} | ${0} | ${11}
+ ${2} | ${9} | ${' 012\n 456\n 89'} | ${4} | ${15}
+ `(
+ 'indents the selected lines two spaces to the right',
+ ({
+ selectionStart,
+ selectionEnd,
+ expected,
+ expectedSelectionStart,
+ expectedSelectionEnd,
+ }) => {
+ const text = '012\n456\n89';
+ textArea.value = text;
+ textArea.setSelectionRange(selectionStart, selectionEnd);
+
+ textArea.dispatchEvent(indentEvent);
+
+ expect(textArea.value).toEqual(expected);
+ expect(textArea.selectionStart).toEqual(expectedSelectionStart);
+ expect(textArea.selectionEnd).toEqual(expectedSelectionEnd);
+ },
+ );
+
+ it('indents a blank line two spaces to the right', () => {
+ textArea.value = '012\n\n89';
+ textArea.setSelectionRange(4, 4);
+
+ textArea.dispatchEvent(indentEvent);
+
+ expect(textArea.value).toEqual('012\n \n89');
+ expect(textArea.selectionStart).toEqual(6);
+ expect(textArea.selectionEnd).toEqual(6);
+ });
+
+ it.each`
+ selectionStart | selectionEnd | expected | expectedSelectionStart | expectedSelectionEnd
+ ${0} | ${0} | ${'234\n 789\n 34'} | ${0} | ${0}
+ ${3} | ${3} | ${'234\n 789\n 34'} | ${1} | ${1}
+ ${7} | ${7} | ${' 234\n789\n 34'} | ${6} | ${6}
+ ${0} | ${3} | ${'234\n 789\n 34'} | ${0} | ${1}
+ ${8} | ${10} | ${' 234\n789\n 34'} | ${7} | ${9}
+ ${14} | ${15} | ${' 234\n 789\n34'} | ${12} | ${13}
+ ${0} | ${15} | ${'234\n789\n34'} | ${0} | ${10}
+ ${3} | ${13} | ${'234\n789\n34'} | ${1} | ${8}
+ ${6} | ${6} | ${' 234\n789\n 34'} | ${6} | ${6}
+ `(
+ 'outdents the selected lines two spaces to the left',
+ ({
+ selectionStart,
+ selectionEnd,
+ expected,
+ expectedSelectionStart,
+ expectedSelectionEnd,
+ }) => {
+ const text = ' 234\n 789\n 34';
+ textArea.value = text;
+ textArea.setSelectionRange(selectionStart, selectionEnd);
+
+ textArea.dispatchEvent(outdentEvent);
+
+ expect(textArea.value).toEqual(expected);
+ expect(textArea.selectionStart).toEqual(expectedSelectionStart);
+ expect(textArea.selectionEnd).toEqual(expectedSelectionEnd);
+ },
+ );
+
+ it('outdenting a blank line has no effect', () => {
+ textArea.value = '012\n\n89';
+ textArea.setSelectionRange(4, 4);
+
+ textArea.dispatchEvent(outdentEvent);
+
+ expect(textArea.value).toEqual('012\n\n89');
+ expect(textArea.selectionStart).toEqual(4);
+ expect(textArea.selectionEnd).toEqual(4);
+ });
+
+ it('does not indent if meta is not set', () => {
+ const indentNoMetaEvent = new KeyboardEvent('keydown', { key: ']' });
+ const text = '012\n456\n89';
+ textArea.value = text;
+ textArea.setSelectionRange(0, 0);
+
+ textArea.dispatchEvent(indentNoMetaEvent);
+
+ expect(textArea.value).toEqual(text);
+ });
+
+ it.each`
+ keyEvent
+ ${new KeyboardEvent('keydown', { key: ']', metaKey: false })}
+ ${new KeyboardEvent('keydown', { key: ']', metaKey: true, shiftKey: true })}
+ ${new KeyboardEvent('keydown', { key: ']', metaKey: true, altKey: true })}
+ ${new KeyboardEvent('keydown', { key: ']', metaKey: true, ctrlKey: true })}
+ `('does not indent unless meta is the only modifier pressed', ({ keyEvent }) => {
+ const text = '012\n456\n89';
+ textArea.value = text;
+ textArea.setSelectionRange(0, 0);
+
+ textArea.dispatchEvent(keyEvent);
+
+ expect(textArea.value).toEqual(text);
+ });
+ });
+
describe('with selection', () => {
let text = 'initial selected value';
let selected = 'selected';
@@ -377,6 +504,15 @@ describe('init markdown', () => {
expect(textArea.value).toEqual(text);
});
+
+ it('does nothing if meta is set', () => {
+ const event = new KeyboardEvent('keydown', { key: '[', metaKey: true });
+
+ textArea.addEventListener('keydown', keypressNoteText);
+ textArea.dispatchEvent(event);
+
+ expect(textArea.value).toEqual(text);
+ });
});
describe('and text to be selected', () => {
diff --git a/spec/frontend/lib/utils/url_utility_spec.js b/spec/frontend/lib/utils/url_utility_spec.js
index 81cf4bd293b..2c6b603197d 100644
--- a/spec/frontend/lib/utils/url_utility_spec.js
+++ b/spec/frontend/lib/utils/url_utility_spec.js
@@ -348,15 +348,13 @@ describe('URL utility', () => {
describe('urlContainsSha', () => {
it('returns true when there is a valid 40-character SHA1 hash in the URL', () => {
shas.valid.forEach((sha) => {
- expect(
- urlUtils.urlContainsSha({ url: `http://urlstuff/${sha}/moreurlstuff` }),
- ).toBeTruthy();
+ expect(urlUtils.urlContainsSha({ url: `http://urlstuff/${sha}/moreurlstuff` })).toBe(true);
});
});
it('returns false when there is not a valid 40-character SHA1 hash in the URL', () => {
shas.invalid.forEach((str) => {
- expect(urlUtils.urlContainsSha({ url: `http://urlstuff/${str}/moreurlstuff` })).toBeFalsy();
+ expect(urlUtils.urlContainsSha({ url: `http://urlstuff/${str}/moreurlstuff` })).toBe(false);
});
});
});
@@ -555,18 +553,22 @@ describe('URL utility', () => {
describe('relativePathToAbsolute', () => {
it.each`
- path | base | result
- ${'./foo'} | ${'bar/'} | ${'/bar/foo'}
- ${'../john.md'} | ${'bar/baz/foo.php'} | ${'/bar/john.md'}
- ${'../images/img.png'} | ${'bar/baz/foo.php'} | ${'/bar/images/img.png'}
- ${'../images/Image 1.png'} | ${'bar/baz/foo.php'} | ${'/bar/images/Image 1.png'}
- ${'/images/img.png'} | ${'bar/baz/foo.php'} | ${'/images/img.png'}
- ${'/images/img.png'} | ${'/bar/baz/foo.php'} | ${'/images/img.png'}
- ${'../john.md'} | ${'/bar/baz/foo.php'} | ${'/bar/john.md'}
- ${'../john.md'} | ${'///bar/baz/foo.php'} | ${'/bar/john.md'}
- ${'/images/img.png'} | ${'https://gitlab.com/user/project/'} | ${'https://gitlab.com/images/img.png'}
- ${'../images/img.png'} | ${'https://gitlab.com/user/project/'} | ${'https://gitlab.com/user/images/img.png'}
- ${'../images/Image 1.png'} | ${'https://gitlab.com/user/project/'} | ${'https://gitlab.com/user/images/Image%201.png'}
+ path | base | result
+ ${'./foo'} | ${'bar/'} | ${'/bar/foo'}
+ ${'../john.md'} | ${'bar/baz/foo.php'} | ${'/bar/john.md'}
+ ${'../images/img.png'} | ${'bar/baz/foo.php'} | ${'/bar/images/img.png'}
+ ${'../images/Image 1.png'} | ${'bar/baz/foo.php'} | ${'/bar/images/Image 1.png'}
+ ${'/images/img.png'} | ${'bar/baz/foo.php'} | ${'/images/img.png'}
+ ${'/images/img.png'} | ${'bar/baz//foo.php'} | ${'/images/img.png'}
+ ${'/images//img.png'} | ${'bar/baz/foo.php'} | ${'/images/img.png'}
+ ${'/images/img.png'} | ${'/bar/baz/foo.php'} | ${'/images/img.png'}
+ ${'../john.md'} | ${'/bar/baz/foo.php'} | ${'/bar/john.md'}
+ ${'../john.md'} | ${'///bar/baz/foo.php'} | ${'/bar/john.md'}
+ ${'/images/img.png'} | ${'https://gitlab.com/user/project/'} | ${'https://gitlab.com/images/img.png'}
+ ${'/images/img.png'} | ${'https://gitlab.com////user/project/'} | ${'https://gitlab.com/images/img.png'}
+ ${'/images////img.png'} | ${'https://gitlab.com/user/project/'} | ${'https://gitlab.com/images/img.png'}
+ ${'../images/img.png'} | ${'https://gitlab.com/user/project/'} | ${'https://gitlab.com/user/images/img.png'}
+ ${'../images/Image 1.png'} | ${'https://gitlab.com/user/project/'} | ${'https://gitlab.com/user/images/Image%201.png'}
`(
'converts relative path "$path" with base "$base" to absolute path => "$result"',
({ path, base, result }) => {
@@ -809,13 +811,13 @@ describe('URL utility', () => {
});
it('should compare against the window location if no compare value is provided', () => {
- expect(urlUtils.urlIsDifferent('different')).toBeTruthy();
- expect(urlUtils.urlIsDifferent(current)).toBeFalsy();
+ expect(urlUtils.urlIsDifferent('different')).toBe(true);
+ expect(urlUtils.urlIsDifferent(current)).toBe(false);
});
it('should use the provided compare value', () => {
- expect(urlUtils.urlIsDifferent('different', current)).toBeTruthy();
- expect(urlUtils.urlIsDifferent(current, current)).toBeFalsy();
+ expect(urlUtils.urlIsDifferent('different', current)).toBe(true);
+ expect(urlUtils.urlIsDifferent(current, current)).toBe(false);
});
});
@@ -1058,4 +1060,28 @@ describe('URL utility', () => {
expect(urlUtils.PROMO_URL).toBe(url);
});
});
+
+ describe('removeUrlProtocol', () => {
+ it.each`
+ input | output
+ ${'http://gitlab.com'} | ${'gitlab.com'}
+ ${'https://gitlab.com'} | ${'gitlab.com'}
+ ${'foo:bar.com'} | ${'bar.com'}
+ ${'gitlab.com'} | ${'gitlab.com'}
+ `('transforms $input to $output', ({ input, output }) => {
+ expect(urlUtils.removeUrlProtocol(input)).toBe(output);
+ });
+ });
+
+ describe('removeLastSlashInUrlPath', () => {
+ it.each`
+ input | output
+ ${'https://www.gitlab.com/path/'} | ${'https://www.gitlab.com/path'}
+ ${'https://www.gitlab.com/?query=search'} | ${'https://www.gitlab.com?query=search'}
+ ${'https://www.gitlab.com/#fragment'} | ${'https://www.gitlab.com#fragment'}
+ ${'https://www.gitlab.com/hello'} | ${'https://www.gitlab.com/hello'}
+ `('transforms $input to $output', ({ input, output }) => {
+ expect(urlUtils.removeLastSlashInUrlPath(input)).toBe(output);
+ });
+ });
});
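The two added blocks specify `removeUrlProtocol` (strip any leading scheme) and `removeLastSlashInUrlPath` (drop a trailing slash at the end of the path even when a query string or fragment follows). Minimal implementations consistent with every row in those tables, offered only as a sketch of the contract rather than the actual `~/lib/utils/url_utility.js` code:

    // Strip a leading "scheme:" or "scheme://" prefix, if present.
    export const removeUrlProtocol = (url) => url.replace(/^\w+:\/{0,2}/, '');

    // Remove a trailing slash from the path component, preserving any query
    // string or fragment that follows it.
    export const removeLastSlashInUrlPath = (url) => url.replace(/\/+(\?|#|$)/, '$1');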
diff --git a/spec/frontend/members/components/action_buttons/access_request_action_buttons_spec.js b/spec/frontend/members/components/action_buttons/access_request_action_buttons_spec.js
index f1471f625f8..3dac47974e7 100644
--- a/spec/frontend/members/components/action_buttons/access_request_action_buttons_spec.js
+++ b/spec/frontend/members/components/action_buttons/access_request_action_buttons_spec.js
@@ -17,8 +17,8 @@ describe('AccessRequestActionButtons', () => {
});
};
- const findRemoveMemberButton = () => wrapper.find(RemoveMemberButton);
- const findApproveButton = () => wrapper.find(ApproveAccessRequestButton);
+ const findRemoveMemberButton = () => wrapper.findComponent(RemoveMemberButton);
+ const findApproveButton = () => wrapper.findComponent(ApproveAccessRequestButton);
afterEach(() => {
wrapper.destroy();
diff --git a/spec/frontend/members/components/action_buttons/approve_access_request_button_spec.js b/spec/frontend/members/components/action_buttons/approve_access_request_button_spec.js
index 08d7cf3c932..15bb03480e1 100644
--- a/spec/frontend/members/components/action_buttons/approve_access_request_button_spec.js
+++ b/spec/frontend/members/components/action_buttons/approve_access_request_button_spec.js
@@ -43,8 +43,8 @@ describe('ApproveAccessRequestButton', () => {
});
};
- const findForm = () => wrapper.find(GlForm);
- const findButton = () => findForm().find(GlButton);
+ const findForm = () => wrapper.findComponent(GlForm);
+ const findButton = () => findForm().findComponent(GlButton);
beforeEach(() => {
createComponent();
diff --git a/spec/frontend/members/components/action_buttons/invite_action_buttons_spec.js b/spec/frontend/members/components/action_buttons/invite_action_buttons_spec.js
index 79252456f67..ea819b4fb83 100644
--- a/spec/frontend/members/components/action_buttons/invite_action_buttons_spec.js
+++ b/spec/frontend/members/components/action_buttons/invite_action_buttons_spec.js
@@ -16,8 +16,8 @@ describe('InviteActionButtons', () => {
});
};
- const findRemoveMemberButton = () => wrapper.find(RemoveMemberButton);
- const findResendInviteButton = () => wrapper.find(ResendInviteButton);
+ const findRemoveMemberButton = () => wrapper.findComponent(RemoveMemberButton);
+ const findResendInviteButton = () => wrapper.findComponent(ResendInviteButton);
afterEach(() => {
wrapper.destroy();
diff --git a/spec/frontend/members/components/action_buttons/leave_button_spec.js b/spec/frontend/members/components/action_buttons/leave_button_spec.js
index 4859d033464..ecfbf4460a6 100644
--- a/spec/frontend/members/components/action_buttons/leave_button_spec.js
+++ b/spec/frontend/members/components/action_buttons/leave_button_spec.js
@@ -22,7 +22,7 @@ describe('LeaveButton', () => {
});
};
- const findButton = () => wrapper.find(GlButton);
+ const findButton = () => wrapper.findComponent(GlButton);
beforeEach(() => {
createComponent();
@@ -44,7 +44,7 @@ describe('LeaveButton', () => {
});
it('renders leave modal', () => {
- const leaveModal = wrapper.find(LeaveModal);
+ const leaveModal = wrapper.findComponent(LeaveModal);
expect(leaveModal.exists()).toBe(true);
expect(leaveModal.props('member')).toEqual(member);
diff --git a/spec/frontend/members/components/action_buttons/remove_group_link_button_spec.js b/spec/frontend/members/components/action_buttons/remove_group_link_button_spec.js
index ca655e36c42..b511cebdf28 100644
--- a/spec/frontend/members/components/action_buttons/remove_group_link_button_spec.js
+++ b/spec/frontend/members/components/action_buttons/remove_group_link_button_spec.js
@@ -42,7 +42,7 @@ describe('RemoveGroupLinkButton', () => {
});
};
- const findButton = () => wrapper.find(GlButton);
+ const findButton = () => wrapper.findComponent(GlButton);
beforeEach(() => {
createComponent();
diff --git a/spec/frontend/members/components/action_buttons/resend_invite_button_spec.js b/spec/frontend/members/components/action_buttons/resend_invite_button_spec.js
index 8e933d16463..51cfd47ddf4 100644
--- a/spec/frontend/members/components/action_buttons/resend_invite_button_spec.js
+++ b/spec/frontend/members/components/action_buttons/resend_invite_button_spec.js
@@ -44,7 +44,7 @@ describe('ResendInviteButton', () => {
};
const findForm = () => wrapper.find('form');
- const findButton = () => findForm().find(GlButton);
+ const findButton = () => findForm().findComponent(GlButton);
beforeEach(() => {
createComponent();
diff --git a/spec/frontend/members/components/action_buttons/user_action_buttons_spec.js b/spec/frontend/members/components/action_buttons/user_action_buttons_spec.js
index 3e4ffb6e61b..6ac46619bc9 100644
--- a/spec/frontend/members/components/action_buttons/user_action_buttons_spec.js
+++ b/spec/frontend/members/components/action_buttons/user_action_buttons_spec.js
@@ -19,7 +19,7 @@ describe('UserActionButtons', () => {
});
};
- const findRemoveMemberButton = () => wrapper.find(RemoveMemberButton);
+ const findRemoveMemberButton = () => wrapper.findComponent(RemoveMemberButton);
afterEach(() => {
wrapper.destroy();
@@ -80,7 +80,7 @@ describe('UserActionButtons', () => {
},
});
- expect(wrapper.find(LeaveButton).exists()).toBe(true);
+ expect(wrapper.findComponent(LeaveButton).exists()).toBe(true);
});
});
});
diff --git a/spec/frontend/members/components/app_spec.js b/spec/frontend/members/components/app_spec.js
index 4124a1870a6..d105a4d9fde 100644
--- a/spec/frontend/members/components/app_spec.js
+++ b/spec/frontend/members/components/app_spec.js
@@ -41,8 +41,8 @@ describe('MembersApp', () => {
});
};
- const findAlert = () => wrapper.find(GlAlert);
- const findFilterSortContainer = () => wrapper.find(FilterSortContainer);
+ const findAlert = () => wrapper.findComponent(GlAlert);
+ const findFilterSortContainer = () => wrapper.findComponent(FilterSortContainer);
beforeEach(() => {
commonUtils.scrollToElement = jest.fn();
diff --git a/spec/frontend/members/components/avatars/group_avatar_spec.js b/spec/frontend/members/components/avatars/group_avatar_spec.js
index 9c1574a84ee..13c50de9835 100644
--- a/spec/frontend/members/components/avatars/group_avatar_spec.js
+++ b/spec/frontend/members/components/avatars/group_avatar_spec.js
@@ -30,7 +30,7 @@ describe('MemberList', () => {
});
it('renders link to group', () => {
- const link = wrapper.find(GlAvatarLink);
+ const link = wrapper.findComponent(GlAvatarLink);
expect(link.exists()).toBe(true);
expect(link.attributes('href')).toBe(group.webUrl);
diff --git a/spec/frontend/members/components/avatars/user_avatar_spec.js b/spec/frontend/members/components/avatars/user_avatar_spec.js
index 7bcf4a11413..9b908e5b6f0 100644
--- a/spec/frontend/members/components/avatars/user_avatar_spec.js
+++ b/spec/frontend/members/components/avatars/user_avatar_spec.js
@@ -33,7 +33,7 @@ describe('UserAvatar', () => {
it("renders link to user's profile", () => {
createComponent();
- const link = wrapper.find(GlAvatarLink);
+ const link = wrapper.findComponent(GlAvatarLink);
expect(link.exists()).toBe(true);
expect(link.attributes()).toMatchObject({
@@ -77,7 +77,7 @@ describe('UserAvatar', () => {
`('renders the "$badgeText" badge', ({ member, badgeText }) => {
createComponent({ member });
- expect(wrapper.find(GlBadge).text()).toBe(badgeText);
+ expect(wrapper.findComponent(GlBadge).text()).toBe(badgeText);
});
it('renders the "It\'s you" badge when member is current user', () => {
diff --git a/spec/frontend/members/components/filter_sort/filter_sort_container_spec.js b/spec/frontend/members/components/filter_sort/filter_sort_container_spec.js
index 4ca8a3bdc36..de2f6e6dd47 100644
--- a/spec/frontend/members/components/filter_sort/filter_sort_container_spec.js
+++ b/spec/frontend/members/components/filter_sort/filter_sort_container_spec.js
@@ -60,7 +60,7 @@ describe('FilterSortContainer', () => {
},
});
- expect(wrapper.find(MembersFilteredSearchBar).exists()).toBe(true);
+ expect(wrapper.findComponent(MembersFilteredSearchBar).exists()).toBe(true);
});
});
@@ -70,7 +70,7 @@ describe('FilterSortContainer', () => {
tableSortableFields: ['account'],
});
- expect(wrapper.find(SortDropdown).exists()).toBe(true);
+ expect(wrapper.findComponent(SortDropdown).exists()).toBe(true);
});
});
});
diff --git a/spec/frontend/members/components/filter_sort/members_filtered_search_bar_spec.js b/spec/frontend/members/components/filter_sort/members_filtered_search_bar_spec.js
index b692eea4aa5..4580fdb06f2 100644
--- a/spec/frontend/members/components/filter_sort/members_filtered_search_bar_spec.js
+++ b/spec/frontend/members/components/filter_sort/members_filtered_search_bar_spec.js
@@ -56,7 +56,7 @@ describe('MembersFilteredSearchBar', () => {
});
};
- const findFilteredSearchBar = () => wrapper.find(FilteredSearchBar);
+ const findFilteredSearchBar = () => wrapper.findComponent(FilteredSearchBar);
it('passes correct props to `FilteredSearchBar` component', () => {
createComponent();
diff --git a/spec/frontend/members/components/filter_sort/sort_dropdown_spec.js b/spec/frontend/members/components/filter_sort/sort_dropdown_spec.js
index 709ad907a38..5581fd52458 100644
--- a/spec/frontend/members/components/filter_sort/sort_dropdown_spec.js
+++ b/spec/frontend/members/components/filter_sort/sort_dropdown_spec.js
@@ -43,13 +43,13 @@ describe('SortDropdown', () => {
});
};
- const findSortingComponent = () => wrapper.find(GlSorting);
+ const findSortingComponent = () => wrapper.findComponent(GlSorting);
const findSortDirectionToggle = () =>
findSortingComponent().find('button[title="Sort direction"]');
const findDropdownToggle = () => wrapper.find('button[aria-haspopup="true"]');
const findDropdownItemByText = (text) =>
wrapper
- .findAll(GlSortingItem)
+ .findAllComponents(GlSortingItem)
.wrappers.find((dropdownItemWrapper) => dropdownItemWrapper.text() === text);
beforeEach(() => {
diff --git a/spec/frontend/members/components/modals/remove_group_link_modal_spec.js b/spec/frontend/members/components/modals/remove_group_link_modal_spec.js
index 447496910b8..af96396f09f 100644
--- a/spec/frontend/members/components/modals/remove_group_link_modal_spec.js
+++ b/spec/frontend/members/components/modals/remove_group_link_modal_spec.js
@@ -47,8 +47,8 @@ describe('RemoveGroupLinkModal', () => {
});
};
- const findModal = () => wrapper.find(GlModal);
- const findForm = () => findModal().find(GlForm);
+ const findModal = () => wrapper.findComponent(GlModal);
+ const findForm = () => findModal().findComponent(GlForm);
const getByText = (text, options) =>
createWrapper(within(findModal().element).getByText(text, options));
diff --git a/spec/frontend/members/components/modals/remove_member_modal_spec.js b/spec/frontend/members/components/modals/remove_member_modal_spec.js
index 1d39c4b3175..59b112492b8 100644
--- a/spec/frontend/members/components/modals/remove_member_modal_spec.js
+++ b/spec/frontend/members/components/modals/remove_member_modal_spec.js
@@ -46,7 +46,7 @@ describe('RemoveMemberModal', () => {
});
};
- const findForm = () => wrapper.find({ ref: 'form' });
+ const findForm = () => wrapper.findComponent({ ref: 'form' });
const findGlModal = () => wrapper.findComponent(GlModal);
const findUserDeletionObstaclesList = () => wrapper.findComponent(UserDeletionObstaclesList);
diff --git a/spec/frontend/members/components/table/created_at_spec.js b/spec/frontend/members/components/table/created_at_spec.js
index 74b71e22893..793c122587d 100644
--- a/spec/frontend/members/components/table/created_at_spec.js
+++ b/spec/frontend/members/components/table/created_at_spec.js
@@ -39,7 +39,7 @@ describe('CreatedAt', () => {
});
it('uses `TimeAgoTooltip` component to display tooltip', () => {
- expect(wrapper.find(TimeAgoTooltip).exists()).toBe(true);
+ expect(wrapper.findComponent(TimeAgoTooltip).exists()).toBe(true);
});
});
diff --git a/spec/frontend/members/components/table/expiration_datepicker_spec.js b/spec/frontend/members/components/table/expiration_datepicker_spec.js
index 4fb43fbd888..9b8f053348b 100644
--- a/spec/frontend/members/components/table/expiration_datepicker_spec.js
+++ b/spec/frontend/members/components/table/expiration_datepicker_spec.js
@@ -56,7 +56,7 @@ describe('ExpirationDatepicker', () => {
};
const findInput = () => wrapper.find('input');
- const findDatepicker = () => wrapper.find(GlDatepicker);
+ const findDatepicker = () => wrapper.findComponent(GlDatepicker);
afterEach(() => {
wrapper.destroy();
diff --git a/spec/frontend/members/components/table/member_action_buttons_spec.js b/spec/frontend/members/components/table/member_action_buttons_spec.js
index 1379b2d26ce..f3f50bf620a 100644
--- a/spec/frontend/members/components/table/member_action_buttons_spec.js
+++ b/spec/frontend/members/components/table/member_action_buttons_spec.js
@@ -38,7 +38,7 @@ describe('MemberActionButtons', () => {
({ memberType, member, expectedComponent }) => {
createComponent({ memberType, member });
- expect(wrapper.find(expectedComponent).exists()).toBe(true);
+ expect(wrapper.findComponent(expectedComponent).exists()).toBe(true);
},
);
});
diff --git a/spec/frontend/members/components/table/member_avatar_spec.js b/spec/frontend/members/components/table/member_avatar_spec.js
index 3cce64effbc..35f82c28fc5 100644
--- a/spec/frontend/members/components/table/member_avatar_spec.js
+++ b/spec/frontend/members/components/table/member_avatar_spec.js
@@ -33,7 +33,7 @@ describe('MemberList', () => {
({ memberType, member, expectedComponent }) => {
createComponent({ memberType, member });
- expect(wrapper.find(expectedComponent).exists()).toBe(true);
+ expect(wrapper.findComponent(expectedComponent).exists()).toBe(true);
},
);
});
diff --git a/spec/frontend/members/components/table/members_table_cell_spec.js b/spec/frontend/members/components/table/members_table_cell_spec.js
index 6575a7c7126..fd56699602e 100644
--- a/spec/frontend/members/components/table/members_table_cell_spec.js
+++ b/spec/frontend/members/components/table/members_table_cell_spec.js
@@ -69,7 +69,7 @@ describe('MembersTableCell', () => {
});
};
- const findWrappedComponent = () => wrapper.find(WrappedComponent);
+ const findWrappedComponent = () => wrapper.findComponent(WrappedComponent);
const memberCurrentUser = {
...memberMock,
diff --git a/spec/frontend/members/components/table/members_table_spec.js b/spec/frontend/members/components/table/members_table_spec.js
index 08baa663bf0..0ed01396fcb 100644
--- a/spec/frontend/members/components/table/members_table_spec.js
+++ b/spec/frontend/members/components/table/members_table_spec.js
@@ -81,13 +81,13 @@ describe('MembersTable', () => {
const url = 'https://localhost/foo-bar/-/project_members?tab=invited';
- const findTable = () => wrapper.find(GlTable);
+ const findTable = () => wrapper.findComponent(GlTable);
const findTableCellByMemberId = (tableCellLabel, memberId) =>
wrapper
.findByTestId(`members-table-row-${memberId}`)
.find(`[data-label="${tableCellLabel}"][role="cell"]`);
- const findPagination = () => extendedWrapper(wrapper.find(GlPagination));
+ const findPagination = () => extendedWrapper(wrapper.findComponent(GlPagination));
const expectCorrectLinkToPage2 = () => {
expect(findPagination().findByText('2', { selector: 'a' }).attributes('href')).toBe(
@@ -126,7 +126,10 @@ describe('MembersTable', () => {
if (expectedComponent) {
expect(
- wrapper.find(`[data-label="${label}"][role="cell"]`).find(expectedComponent).exists(),
+ wrapper
+ .find(`[data-label="${label}"][role="cell"]`)
+ .findComponent(expectedComponent)
+ .exists(),
).toBe(true);
}
});
@@ -179,7 +182,10 @@ describe('MembersTable', () => {
expect(actionField.exists()).toBe(true);
expect(actionField.classes('gl-sr-only')).toBe(true);
expect(
- wrapper.find(`[data-label="Actions"][role="cell"]`).find(MemberActionButtons).exists(),
+ wrapper
+ .find(`[data-label="Actions"][role="cell"]`)
+ .findComponent(MemberActionButtons)
+ .exists(),
).toBe(true);
});
@@ -250,9 +256,9 @@ describe('MembersTable', () => {
it('renders badge in "Max role" field', () => {
createComponent({ members: [memberMock], tableFields: ['maxRole'] });
- expect(wrapper.find(`[data-label="Max role"][role="cell"]`).find(GlBadge).text()).toBe(
- memberMock.accessLevel.stringValue,
- );
+ expect(
+ wrapper.find(`[data-label="Max role"][role="cell"]`).findComponent(GlBadge).text(),
+ ).toBe(memberMock.accessLevel.stringValue);
});
});
diff --git a/spec/frontend/members/components/table/role_dropdown_spec.js b/spec/frontend/members/components/table/role_dropdown_spec.js
index 2f1626a7044..b254cce4d72 100644
--- a/spec/frontend/members/components/table/role_dropdown_spec.js
+++ b/spec/frontend/members/components/table/role_dropdown_spec.js
@@ -57,11 +57,11 @@ describe('RoleDropdown', () => {
);
const getCheckedDropdownItem = () =>
wrapper
- .findAll(GlDropdownItem)
+ .findAllComponents(GlDropdownItem)
.wrappers.find((dropdownItemWrapper) => dropdownItemWrapper.props('isChecked'));
const findDropdownToggle = () => wrapper.find('button[aria-haspopup="true"]');
- const findDropdown = () => wrapper.find(GlDropdown);
+ const findDropdown = () => wrapper.findComponent(GlDropdown);
afterEach(() => {
wrapper.destroy();
diff --git a/spec/frontend/members/index_spec.js b/spec/frontend/members/index_spec.js
index 251a8b0b774..5c813eb2a67 100644
--- a/spec/frontend/members/index_spec.js
+++ b/spec/frontend/members/index_spec.js
@@ -39,7 +39,7 @@ describe('initMembersApp', () => {
it('renders `MembersTabs`', () => {
setup();
- expect(wrapper.find(MembersTabs).exists()).toBe(true);
+ expect(wrapper.findComponent(MembersTabs).exists()).toBe(true);
});
it('parses and sets `members` in Vuex store', () => {
diff --git a/spec/frontend/monitoring/components/charts/anomaly_spec.js b/spec/frontend/monitoring/components/charts/anomaly_spec.js
index 8dc6132709e..3674a49f42c 100644
--- a/spec/frontend/monitoring/components/charts/anomaly_spec.js
+++ b/spec/frontend/monitoring/components/charts/anomaly_spec.js
@@ -20,7 +20,7 @@ describe('Anomaly chart component', () => {
propsData: { ...props },
});
};
- const findTimeSeries = () => wrapper.find(MonitorTimeSeriesChart);
+ const findTimeSeries = () => wrapper.findComponent(MonitorTimeSeriesChart);
const getTimeSeriesProps = () => findTimeSeries().props();
describe('wrapped monitor-time-series-chart component', () => {
diff --git a/spec/frontend/monitoring/components/charts/bar_spec.js b/spec/frontend/monitoring/components/charts/bar_spec.js
index 6368c53943a..5339a7a525b 100644
--- a/spec/frontend/monitoring/components/charts/bar_spec.js
+++ b/spec/frontend/monitoring/components/charts/bar_spec.js
@@ -33,7 +33,7 @@ describe('Bar component', () => {
let chartData;
beforeEach(() => {
- glbarChart = barChart.find(GlBarChart);
+ glbarChart = barChart.findComponent(GlBarChart);
chartData = barChart.vm.chartData[graphData.metrics[0].label];
});
diff --git a/spec/frontend/monitoring/components/charts/column_spec.js b/spec/frontend/monitoring/components/charts/column_spec.js
index e10cb3a456a..0158966997f 100644
--- a/spec/frontend/monitoring/components/charts/column_spec.js
+++ b/spec/frontend/monitoring/components/charts/column_spec.js
@@ -44,7 +44,7 @@ describe('Column component', () => {
},
});
};
- const findChart = () => wrapper.find(GlColumnChart);
+ const findChart = () => wrapper.findComponent(GlColumnChart);
const chartProps = (prop) => findChart().props(prop);
beforeEach(() => {
diff --git a/spec/frontend/monitoring/components/charts/gauge_spec.js b/spec/frontend/monitoring/components/charts/gauge_spec.js
index c8f67d5d8c7..484199698ea 100644
--- a/spec/frontend/monitoring/components/charts/gauge_spec.js
+++ b/spec/frontend/monitoring/components/charts/gauge_spec.js
@@ -8,7 +8,7 @@ describe('Gauge Chart component', () => {
let wrapper;
- const findGaugeChart = () => wrapper.find(GlGaugeChart);
+ const findGaugeChart = () => wrapper.findComponent(GlGaugeChart);
const createWrapper = ({ ...graphProps } = {}) => {
wrapper = shallowMount(GaugeChart, {
diff --git a/spec/frontend/monitoring/components/charts/heatmap_spec.js b/spec/frontend/monitoring/components/charts/heatmap_spec.js
index 841b7e0648a..e163d4e73a0 100644
--- a/spec/frontend/monitoring/components/charts/heatmap_spec.js
+++ b/spec/frontend/monitoring/components/charts/heatmap_spec.js
@@ -8,7 +8,7 @@ describe('Heatmap component', () => {
let wrapper;
let store;
- const findChart = () => wrapper.find(GlHeatmap);
+ const findChart = () => wrapper.findComponent(GlHeatmap);
const graphData = heatmapGraphData();
diff --git a/spec/frontend/monitoring/components/charts/single_stat_spec.js b/spec/frontend/monitoring/components/charts/single_stat_spec.js
index 8633b49565f..62a0b7e6ad3 100644
--- a/spec/frontend/monitoring/components/charts/single_stat_spec.js
+++ b/spec/frontend/monitoring/components/charts/single_stat_spec.js
@@ -15,7 +15,7 @@ describe('Single Stat Chart component', () => {
});
};
- const findChart = () => wrapper.find(GlSingleStat);
+ const findChart = () => wrapper.findComponent(GlSingleStat);
beforeEach(() => {
createComponent();
diff --git a/spec/frontend/monitoring/components/charts/stacked_column_spec.js b/spec/frontend/monitoring/components/charts/stacked_column_spec.js
index 9cab3650f28..91fe36bc6e4 100644
--- a/spec/frontend/monitoring/components/charts/stacked_column_spec.js
+++ b/spec/frontend/monitoring/components/charts/stacked_column_spec.js
@@ -15,8 +15,8 @@ describe('Stacked column chart component', () => {
let wrapper;
- const findChart = () => wrapper.find(GlStackedColumnChart);
- const findLegend = () => wrapper.find(GlChartLegend);
+ const findChart = () => wrapper.findComponent(GlStackedColumnChart);
+ const findLegend = () => wrapper.findComponent(GlChartLegend);
const createWrapper = (props = {}, mountingMethod = shallowMount) =>
mountingMethod(StackedColumnChart, {
diff --git a/spec/frontend/monitoring/components/charts/time_series_spec.js b/spec/frontend/monitoring/components/charts/time_series_spec.js
index f4bca26f659..503dee7b937 100644
--- a/spec/frontend/monitoring/components/charts/time_series_spec.js
+++ b/spec/frontend/monitoring/components/charts/time_series_spec.js
@@ -9,7 +9,6 @@ import { mount, shallowMount } from '@vue/test-utils';
import timezoneMock from 'timezone-mock';
import { nextTick } from 'vue';
import { TEST_HOST } from 'helpers/test_constants';
-import { setTestTimeout } from 'helpers/timeout';
import { shallowWrapperContainsSlotText } from 'helpers/vue_test_utils_helper';
import TimeSeries from '~/monitoring/components/charts/time_series.vue';
import { panelTypes, chartHeight } from '~/monitoring/constants';
@@ -59,17 +58,13 @@ describe('Time series component', () => {
});
};
- beforeEach(() => {
- setTestTimeout(1000);
- });
-
afterEach(() => {
wrapper.destroy();
});
describe('With a single time series', () => {
describe('general functions', () => {
- const findChart = () => wrapper.find({ ref: 'chart' });
+ const findChart = () => wrapper.findComponent({ ref: 'chart' });
beforeEach(async () => {
createWrapper({}, mount);
@@ -215,7 +210,7 @@ describe('Time series component', () => {
const name = 'Metric 1';
const value = '5.556';
const dataIndex = 0;
- const seriesLabel = wrapper.find(GlChartSeriesLabel);
+ const seriesLabel = wrapper.findComponent(GlChartSeriesLabel);
expect(seriesLabel.vm.color).toBe('');
@@ -225,7 +220,11 @@ describe('Time series component', () => {
]);
expect(
- shallowWrapperContainsSlotText(wrapper.find(GlLineChart), 'tooltip-content', value),
+ shallowWrapperContainsSlotText(
+ wrapper.findComponent(GlLineChart),
+ 'tooltip-content',
+ value,
+ ),
).toBe(true);
});
@@ -598,7 +597,7 @@ describe('Time series component', () => {
glChartComponents.forEach((dynamicComponent) => {
describe(`GitLab UI: ${dynamicComponent.chartType}`, () => {
- const findChartComponent = () => wrapper.find(dynamicComponent.component);
+ const findChartComponent = () => wrapper.findComponent(dynamicComponent.component);
beforeEach(async () => {
createWrapper(
@@ -656,7 +655,7 @@ describe('Time series component', () => {
wrapper.vm.tooltip.commitUrl = commitUrl;
await nextTick();
- const commitLink = wrapper.find(GlLink);
+ const commitLink = wrapper.findComponent(GlLink);
expect(shallowWrapperContainsSlotText(commitLink, 'default', mockSha)).toBe(true);
expect(commitLink.attributes('href')).toEqual(commitUrl);
@@ -680,7 +679,9 @@ describe('Time series component', () => {
let lineColors;
beforeEach(() => {
- lineColors = wrapper.find(GlAreaChart).vm.series.map((item) => item.lineStyle.color);
+ lineColors = wrapper
+ .findComponent(GlAreaChart)
+ .vm.series.map((item) => item.lineStyle.color);
});
it('should contain different colors for contiguous time series', () => {
@@ -690,7 +691,7 @@ describe('Time series component', () => {
});
it('should match series color with tooltip label color', () => {
- const labels = wrapper.findAll(GlChartSeriesLabel);
+ const labels = wrapper.findAllComponents(GlChartSeriesLabel);
lineColors.forEach((color, index) => {
const labelColor = labels.at(index).props('color');
@@ -700,7 +701,7 @@ describe('Time series component', () => {
it('should match series color with legend color', () => {
const legendColors = wrapper
- .find(GlChartLegend)
+ .findComponent(GlChartLegend)
.props('seriesInfo')
.map((item) => item.color);
@@ -713,7 +714,7 @@ describe('Time series component', () => {
});
describe('legend layout', () => {
- const findLegend = () => wrapper.find(GlChartLegend);
+ const findLegend = () => wrapper.findComponent(GlChartLegend);
beforeEach(async () => {
createWrapper({}, mount);
diff --git a/spec/frontend/monitoring/components/dashboard_actions_menu_spec.js b/spec/frontend/monitoring/components/dashboard_actions_menu_spec.js
index d74f959ac0f..bb57420d406 100644
--- a/spec/frontend/monitoring/components/dashboard_actions_menu_spec.js
+++ b/spec/frontend/monitoring/components/dashboard_actions_menu_spec.js
@@ -92,7 +92,7 @@ describe('Actions menu', () => {
});
it('renders custom metrics form fields', () => {
- expect(wrapper.find(CustomMetricsFormFields).exists()).toBe(true);
+ expect(wrapper.findComponent(CustomMetricsFormFields).exists()).toBe(true);
});
});
@@ -316,7 +316,7 @@ describe('Actions menu', () => {
});
it('is not disabled', () => {
- expect(findStarDashboardItem().attributes('disabled')).toBeFalsy();
+ expect(findStarDashboardItem().attributes('disabled')).toBeUndefined();
});
it('is disabled when starring is taking place', async () => {
diff --git a/spec/frontend/monitoring/components/dashboard_header_spec.js b/spec/frontend/monitoring/components/dashboard_header_spec.js
index e28c2913949..18ccda2c41c 100644
--- a/spec/frontend/monitoring/components/dashboard_header_spec.js
+++ b/spec/frontend/monitoring/components/dashboard_header_spec.js
@@ -29,18 +29,19 @@ describe('Dashboard header', () => {
let store;
let wrapper;
- const findDashboardDropdown = () => wrapper.find(DashboardsDropdown);
+ const findDashboardDropdown = () => wrapper.findComponent(DashboardsDropdown);
- const findEnvsDropdown = () => wrapper.find({ ref: 'monitorEnvironmentsDropdown' });
- const findEnvsDropdownItems = () => findEnvsDropdown().findAll(GlDropdownItem);
- const findEnvsDropdownSearch = () => findEnvsDropdown().find(GlSearchBoxByType);
- const findEnvsDropdownSearchMsg = () => wrapper.find({ ref: 'monitorEnvironmentsDropdownMsg' });
- const findEnvsDropdownLoadingIcon = () => findEnvsDropdown().find(GlLoadingIcon);
+ const findEnvsDropdown = () => wrapper.findComponent({ ref: 'monitorEnvironmentsDropdown' });
+ const findEnvsDropdownItems = () => findEnvsDropdown().findAllComponents(GlDropdownItem);
+ const findEnvsDropdownSearch = () => findEnvsDropdown().findComponent(GlSearchBoxByType);
+ const findEnvsDropdownSearchMsg = () =>
+ wrapper.findComponent({ ref: 'monitorEnvironmentsDropdownMsg' });
+ const findEnvsDropdownLoadingIcon = () => findEnvsDropdown().findComponent(GlLoadingIcon);
- const findDateTimePicker = () => wrapper.find(DateTimePicker);
- const findRefreshButton = () => wrapper.find(RefreshButton);
+ const findDateTimePicker = () => wrapper.findComponent(DateTimePicker);
+ const findRefreshButton = () => wrapper.findComponent(RefreshButton);
- const findActionsMenu = () => wrapper.find(ActionsMenu);
+ const findActionsMenu = () => wrapper.findComponent(ActionsMenu);
const setSearchTerm = (searchTerm) => {
store.commit(`monitoringDashboard/${types.SET_ENVIRONMENTS_FILTER}`, searchTerm);
diff --git a/spec/frontend/monitoring/components/dashboard_panel_builder_spec.js b/spec/frontend/monitoring/components/dashboard_panel_builder_spec.js
index f19ef6c6fb7..d71f6374967 100644
--- a/spec/frontend/monitoring/components/dashboard_panel_builder_spec.js
+++ b/spec/frontend/monitoring/components/dashboard_panel_builder_spec.js
@@ -32,14 +32,14 @@ describe('dashboard invalid url parameters', () => {
});
};
- const findForm = () => wrapper.find(GlForm);
- const findTxtArea = () => findForm().find(GlFormTextarea);
+ const findForm = () => wrapper.findComponent(GlForm);
+ const findTxtArea = () => findForm().findComponent(GlFormTextarea);
const findSubmitBtn = () => findForm().find('[type="submit"]');
- const findClipboardCopyBtn = () => wrapper.find({ ref: 'clipboardCopyBtn' });
- const findViewDocumentationBtn = () => wrapper.find({ ref: 'viewDocumentationBtn' });
- const findOpenRepositoryBtn = () => wrapper.find({ ref: 'openRepositoryBtn' });
- const findPanel = () => wrapper.find(DashboardPanel);
- const findTimeRangePicker = () => wrapper.find(DateTimePicker);
+ const findClipboardCopyBtn = () => wrapper.findComponent({ ref: 'clipboardCopyBtn' });
+ const findViewDocumentationBtn = () => wrapper.findComponent({ ref: 'viewDocumentationBtn' });
+ const findOpenRepositoryBtn = () => wrapper.findComponent({ ref: 'openRepositoryBtn' });
+ const findPanel = () => wrapper.findComponent(DashboardPanel);
+ const findTimeRangePicker = () => wrapper.findComponent(DateTimePicker);
const findRefreshButton = () => wrapper.find('[data-testid="previewRefreshButton"]');
beforeEach(() => {
@@ -192,8 +192,8 @@ describe('dashboard invalid url parameters', () => {
});
it('displays an alert', () => {
- expect(wrapper.find(GlAlert).exists()).toBe(true);
- expect(wrapper.find(GlAlert).text()).toBe(mockError);
+ expect(wrapper.findComponent(GlAlert).exists()).toBe(true);
+ expect(wrapper.findComponent(GlAlert).text()).toBe(mockError);
});
it('displays an empty dashboard panel', () => {
@@ -215,11 +215,11 @@ describe('dashboard invalid url parameters', () => {
});
it('displays no alert', () => {
- expect(wrapper.find(GlAlert).exists()).toBe(false);
+ expect(wrapper.findComponent(GlAlert).exists()).toBe(false);
});
it('displays panel with data', () => {
- const { title, type } = wrapper.find(DashboardPanel).props('graphData');
+ const { title, type } = wrapper.findComponent(DashboardPanel).props('graphData');
expect(title).toBe(mockPanel.title);
expect(type).toBe(mockPanel.type);
diff --git a/spec/frontend/monitoring/components/dashboard_panel_spec.js b/spec/frontend/monitoring/components/dashboard_panel_spec.js
index 7c54a4742ac..d797d9e2ad0 100644
--- a/spec/frontend/monitoring/components/dashboard_panel_spec.js
+++ b/spec/frontend/monitoring/components/dashboard_panel_spec.js
@@ -3,7 +3,6 @@ import { shallowMount } from '@vue/test-utils';
import AxiosMockAdapter from 'axios-mock-adapter';
import Vuex from 'vuex';
import { nextTick } from 'vue';
-import { setTestTimeout } from 'helpers/timeout';
import axios from '~/lib/utils/axios_utils';
import MonitorAnomalyChart from '~/monitoring/components/charts/anomaly.vue';
@@ -42,11 +41,11 @@ describe('Dashboard Panel', () => {
const exampleText = 'example_text';
- const findCopyLink = () => wrapper.find({ ref: 'copyChartLink' });
- const findTimeChart = () => wrapper.find({ ref: 'timeSeriesChart' });
- const findTitle = () => wrapper.find({ ref: 'graphTitle' });
- const findCtxMenu = () => wrapper.find({ ref: 'contextualMenu' });
- const findMenuItems = () => wrapper.findAll(GlDropdownItem);
+ const findCopyLink = () => wrapper.findComponent({ ref: 'copyChartLink' });
+ const findTimeChart = () => wrapper.findComponent({ ref: 'timeSeriesChart' });
+ const findTitle = () => wrapper.findComponent({ ref: 'graphTitle' });
+ const findCtxMenu = () => wrapper.findComponent({ ref: 'contextualMenu' });
+ const findMenuItems = () => wrapper.findAllComponents(GlDropdownItem);
const findMenuItemByText = (text) => findMenuItems().filter((i) => i.text() === text);
const createWrapper = (props, { mountFn = shallowMount, ...options } = {}) => {
@@ -72,8 +71,6 @@ describe('Dashboard Panel', () => {
};
beforeEach(() => {
- setTestTimeout(1000);
-
store = createStore();
state = store.state.monitoringDashboard;
@@ -118,7 +115,7 @@ describe('Dashboard Panel', () => {
});
it('renders no download csv link', () => {
- expect(wrapper.find({ ref: 'downloadCsvLink' }).exists()).toBe(false);
+ expect(wrapper.findComponent({ ref: 'downloadCsvLink' }).exists()).toBe(false);
});
it('does not contain graph widgets', () => {
@@ -126,7 +123,7 @@ describe('Dashboard Panel', () => {
});
it('The Empty Chart component is rendered and is a Vue instance', () => {
- expect(wrapper.find(MonitorEmptyChart).exists()).toBe(true);
+ expect(wrapper.findComponent(MonitorEmptyChart).exists()).toBe(true);
});
});
@@ -146,7 +143,7 @@ describe('Dashboard Panel', () => {
});
it('renders no download csv link', () => {
- expect(wrapper.find({ ref: 'downloadCsvLink' }).exists()).toBe(false);
+ expect(wrapper.findComponent({ ref: 'downloadCsvLink' }).exists()).toBe(false);
});
it('does not contain graph widgets', () => {
@@ -154,7 +151,7 @@ describe('Dashboard Panel', () => {
});
it('The Empty Chart component is rendered and is a Vue instance', () => {
- expect(wrapper.find(MonitorEmptyChart).exists()).toBe(true);
+ expect(wrapper.findComponent(MonitorEmptyChart).exists()).toBe(true);
});
});
@@ -173,7 +170,7 @@ describe('Dashboard Panel', () => {
it('contains graph widgets', () => {
expect(findCtxMenu().exists()).toBe(true);
- expect(wrapper.find({ ref: 'downloadCsvLink' }).exists()).toBe(true);
+ expect(wrapper.findComponent({ ref: 'downloadCsvLink' }).exists()).toBe(true);
});
it('sets no clipboard copy link on dropdown by default', () => {
@@ -208,12 +205,12 @@ describe('Dashboard Panel', () => {
it('empty chart is rendered for empty results', () => {
createWrapper({ graphData: graphDataEmpty });
- expect(wrapper.find(MonitorEmptyChart).exists()).toBe(true);
+ expect(wrapper.findComponent(MonitorEmptyChart).exists()).toBe(true);
});
it('area chart is rendered by default', () => {
createWrapper();
- expect(wrapper.find(MonitorTimeSeriesChart).exists()).toBe(true);
+ expect(wrapper.findComponent(MonitorTimeSeriesChart).exists()).toBe(true);
});
describe.each`
@@ -234,8 +231,8 @@ describe('Dashboard Panel', () => {
});
it(`renders the chart component and binds attributes`, () => {
- expect(wrapper.find(component).exists()).toBe(true);
- expect(wrapper.find(component).attributes()).toMatchObject(attrs);
+ expect(wrapper.findComponent(component).exists()).toBe(true);
+ expect(wrapper.findComponent(component).attributes()).toMatchObject(attrs);
});
it(`contextual menu is ${hasCtxMenu ? '' : 'not '}shown`, () => {
@@ -273,7 +270,7 @@ describe('Dashboard Panel', () => {
});
describe('Edit custom metric dropdown item', () => {
- const findEditCustomMetricLink = () => wrapper.find({ ref: 'editMetricLink' });
+ const findEditCustomMetricLink = () => wrapper.findComponent({ ref: 'editMetricLink' });
const mockEditPath = '/root/kubernetes-gke-project/prometheus/metrics/23/edit';
beforeEach(async () => {
@@ -434,7 +431,7 @@ describe('Dashboard Panel', () => {
});
it('it renders a time series chart with no errors', () => {
- expect(wrapper.find(MonitorTimeSeriesChart).exists()).toBe(true);
+ expect(wrapper.findComponent(MonitorTimeSeriesChart).exists()).toBe(true);
});
});
@@ -446,7 +443,7 @@ describe('Dashboard Panel', () => {
it('displays a heatmap in local timezone', () => {
createWrapper({ graphData: heatmapGraphData() });
- expect(wrapper.find(MonitorHeatmapChart).props('timezone')).toBe('LOCAL');
+ expect(wrapper.findComponent(MonitorHeatmapChart).props('timezone')).toBe('LOCAL');
});
describe('when timezone is set to UTC', () => {
@@ -461,13 +458,13 @@ describe('Dashboard Panel', () => {
it('displays a heatmap with UTC', () => {
createWrapper({ graphData: heatmapGraphData() });
- expect(wrapper.find(MonitorHeatmapChart).props('timezone')).toBe('UTC');
+ expect(wrapper.findComponent(MonitorHeatmapChart).props('timezone')).toBe('UTC');
});
});
});
describe('Expand to full screen', () => {
- const findExpandBtn = () => wrapper.find({ ref: 'expandBtn' });
+ const findExpandBtn = () => wrapper.findComponent({ ref: 'expandBtn' });
describe('when there is no @expand listener', () => {
it('does not show `View full screen` option', () => {
@@ -495,7 +492,7 @@ describe('Dashboard Panel', () => {
});
describe('When graphData contains links', () => {
- const findManageLinksItem = () => wrapper.find({ ref: 'manageLinksItem' });
+ const findManageLinksItem = () => wrapper.findComponent({ ref: 'manageLinksItem' });
const mockLinks = [
{
url: 'https://example.com',
diff --git a/spec/frontend/monitoring/components/dashboard_spec.js b/spec/frontend/monitoring/components/dashboard_spec.js
index 90171cfc65e..608404e5c5b 100644
--- a/spec/frontend/monitoring/components/dashboard_spec.js
+++ b/spec/frontend/monitoring/components/dashboard_spec.js
@@ -97,8 +97,10 @@ describe('Dashboard', () => {
createShallowWrapper({ hasMetrics: true });
await nextTick();
- expect(wrapper.find(EmptyState).exists()).toBe(true);
- expect(wrapper.find(EmptyState).props('selectedState')).toBe(dashboardEmptyStates.LOADING);
+ expect(wrapper.findComponent(EmptyState).exists()).toBe(true);
+ expect(wrapper.findComponent(EmptyState).props('selectedState')).toBe(
+ dashboardEmptyStates.LOADING,
+ );
});
it('hides the group panels when showPanels is false', async () => {
@@ -126,7 +128,7 @@ describe('Dashboard', () => {
describe('panel containers layout', () => {
const findPanelLayoutWrapperAt = (index) => {
return wrapper
- .find(GraphGroup)
+ .findComponent(GraphGroup)
.findAll('[data-testid="dashboard-panel-layout-wrapper"]')
.at(index);
};
@@ -366,7 +368,7 @@ describe('Dashboard', () => {
});
describe('when all panels in the first group are loading', () => {
- const findGroupAt = (i) => wrapper.findAll(GraphGroup).at(i);
+ const findGroupAt = (i) => wrapper.findAllComponents(GraphGroup).at(i);
beforeEach(async () => {
setupStoreWithDashboard(store);
@@ -409,7 +411,7 @@ describe('Dashboard', () => {
setupStoreWithData(store);
await nextTick();
- wrapper.findAll(GraphGroup).wrappers.forEach((groupWrapper) => {
+ wrapper.findAllComponents(GraphGroup).wrappers.forEach((groupWrapper) => {
expect(groupWrapper.props('isLoading')).toBe(false);
});
});
@@ -443,7 +445,7 @@ describe('Dashboard', () => {
});
describe('single panel expands to "full screen" mode', () => {
- const findExpandedPanel = () => wrapper.find({ ref: 'expandedPanel' });
+ const findExpandedPanel = () => wrapper.findComponent({ ref: 'expandedPanel' });
describe('when the panel is not expanded', () => {
beforeEach(async () => {
@@ -457,7 +459,7 @@ describe('Dashboard', () => {
});
it('can set a panel as expanded', () => {
- const panel = wrapper.findAll(DashboardPanel).at(1);
+ const panel = wrapper.findAllComponents(DashboardPanel).at(1);
jest.spyOn(store, 'dispatch');
@@ -503,7 +505,7 @@ describe('Dashboard', () => {
});
it('displays a single panel and others are hidden', () => {
- const panels = wrapper.findAll(MockPanel);
+ const panels = wrapper.findAllComponents(MockPanel);
const visiblePanels = panels.filter((w) => w.isVisible());
expect(findExpandedPanel().isVisible()).toBe(true);
@@ -523,7 +525,7 @@ describe('Dashboard', () => {
});
it('restores full dashboard by clicking `back`', () => {
- wrapper.find({ ref: 'goBackBtn' }).vm.$emit('click');
+ wrapper.findComponent({ ref: 'goBackBtn' }).vm.$emit('click');
expect(store.dispatch).toHaveBeenCalledWith(
'monitoringDashboard/clearExpandedPanel',
@@ -551,21 +553,21 @@ describe('Dashboard', () => {
});
it('shows a group empty area', () => {
- const emptyGroup = wrapper.findAll({ ref: 'empty-group' });
+ const emptyGroup = wrapper.findAllComponents({ ref: 'empty-group' });
expect(emptyGroup).toHaveLength(1);
expect(emptyGroup.is(GroupEmptyState)).toBe(true);
});
it('group empty area displays a NO_DATA state', () => {
- expect(wrapper.findAll({ ref: 'empty-group' }).at(0).props('selectedState')).toEqual(
- metricStates.NO_DATA,
- );
+ expect(
+ wrapper.findAllComponents({ ref: 'empty-group' }).at(0).props('selectedState'),
+ ).toEqual(metricStates.NO_DATA);
});
});
describe('drag and drop function', () => {
- const findDraggables = () => wrapper.findAll(VueDraggable);
+ const findDraggables = () => wrapper.findAllComponents(VueDraggable);
const findEnabledDraggables = () => findDraggables().filter((f) => !f.attributes('disabled'));
const findDraggablePanels = () => wrapper.findAll('.js-draggable-panel');
const findRearrangeButton = () => wrapper.find('.js-rearrange-button');
@@ -677,7 +679,7 @@ describe('Dashboard', () => {
});
it('hides dashboard header by default', () => {
- expect(wrapper.find({ ref: 'prometheusGraphsHeader' }).exists()).toEqual(false);
+ expect(wrapper.findComponent({ ref: 'prometheusGraphsHeader' }).exists()).toEqual(false);
});
it('renders correctly', () => {
@@ -742,7 +744,7 @@ describe('Dashboard', () => {
const panelIndex = 1; // skip expanded panel
const getClipboardTextFirstPanel = () =>
- wrapper.findAll(DashboardPanel).at(panelIndex).props('clipboardText');
+ wrapper.findAllComponents(DashboardPanel).at(panelIndex).props('clipboardText');
beforeEach(async () => {
setupStoreWithData(store);
@@ -770,7 +772,7 @@ describe('Dashboard', () => {
// While the recommendation in the documentation is to test
// with a data-testid attribute, I want to make sure that
// the dashboard panels have a ref attribute set.
- const getDashboardPanel = () => wrapper.find({ ref: panelRef });
+ const getDashboardPanel = () => wrapper.findComponent({ ref: panelRef });
beforeEach(async () => {
setupStoreWithData(store);
diff --git a/spec/frontend/monitoring/components/dashboard_url_time_spec.js b/spec/frontend/monitoring/components/dashboard_url_time_spec.js
index 64c48100b31..a327e234581 100644
--- a/spec/frontend/monitoring/components/dashboard_url_time_spec.js
+++ b/spec/frontend/monitoring/components/dashboard_url_time_spec.js
@@ -35,7 +35,8 @@ describe('dashboard invalid url parameters', () => {
});
};
- const findDateTimePicker = () => wrapper.find(DashboardHeader).find({ ref: 'dateTimePicker' });
+ const findDateTimePicker = () =>
+ wrapper.findComponent(DashboardHeader).findComponent({ ref: 'dateTimePicker' });
beforeEach(() => {
store = createStore();
diff --git a/spec/frontend/monitoring/components/dashboards_dropdown_spec.js b/spec/frontend/monitoring/components/dashboards_dropdown_spec.js
index f6d30384847..721992e710a 100644
--- a/spec/frontend/monitoring/components/dashboards_dropdown_spec.js
+++ b/spec/frontend/monitoring/components/dashboards_dropdown_spec.js
@@ -33,11 +33,11 @@ describe('DashboardsDropdown', () => {
});
}
- const findItems = () => wrapper.findAll(GlDropdownItem);
- const findItemAt = (i) => wrapper.findAll(GlDropdownItem).at(i);
- const findSearchInput = () => wrapper.find({ ref: 'monitorDashboardsDropdownSearch' });
- const findNoItemsMsg = () => wrapper.find({ ref: 'monitorDashboardsDropdownMsg' });
- const findStarredListDivider = () => wrapper.find({ ref: 'starredListDivider' });
+ const findItems = () => wrapper.findAllComponents(GlDropdownItem);
+ const findItemAt = (i) => wrapper.findAllComponents(GlDropdownItem).at(i);
+ const findSearchInput = () => wrapper.findComponent({ ref: 'monitorDashboardsDropdownSearch' });
+ const findNoItemsMsg = () => wrapper.findComponent({ ref: 'monitorDashboardsDropdownMsg' });
+ const findStarredListDivider = () => wrapper.findComponent({ ref: 'starredListDivider' });
// setData usage is discouraged. See https://gitlab.com/groups/gitlab-org/-/epics/7330 for details
// eslint-disable-next-line no-restricted-syntax
const setSearchTerm = (searchTerm) => wrapper.setData({ searchTerm });
@@ -127,7 +127,7 @@ describe('DashboardsDropdown', () => {
});
it('displays a star icon', () => {
- const star = findItemAt(0).find(GlIcon);
+ const star = findItemAt(0).findComponent(GlIcon);
expect(star.exists()).toBe(true);
expect(star.attributes('name')).toBe('star');
});
@@ -148,7 +148,7 @@ describe('DashboardsDropdown', () => {
});
it('displays no star icon', () => {
- const star = findItemAt(0).find(GlIcon);
+ const star = findItemAt(0).findComponent(GlIcon);
expect(star.exists()).toBe(false);
});
diff --git a/spec/frontend/monitoring/components/duplicate_dashboard_form_spec.js b/spec/frontend/monitoring/components/duplicate_dashboard_form_spec.js
index 0dd3afd7c83..755204dc721 100644
--- a/spec/frontend/monitoring/components/duplicate_dashboard_form_spec.js
+++ b/spec/frontend/monitoring/components/duplicate_dashboard_form_spec.js
@@ -18,7 +18,7 @@ const createMountedWrapper = (props = {}) => {
describe('DuplicateDashboardForm', () => {
const defaultBranch = 'main';
- const findByRef = (ref) => wrapper.find({ ref });
+ const findByRef = (ref) => wrapper.findComponent({ ref });
const setValue = (ref, val) => {
findByRef(ref).setValue(val);
};
diff --git a/spec/frontend/monitoring/components/duplicate_dashboard_modal_spec.js b/spec/frontend/monitoring/components/duplicate_dashboard_modal_spec.js
index 7e7a7a66d77..3032c236741 100644
--- a/spec/frontend/monitoring/components/duplicate_dashboard_modal_spec.js
+++ b/spec/frontend/monitoring/components/duplicate_dashboard_modal_spec.js
@@ -44,9 +44,9 @@ describe('duplicate dashboard modal', () => {
});
}
- const findAlert = () => wrapper.find(GlAlert);
- const findModal = () => wrapper.find(GlModal);
- const findDuplicateDashboardForm = () => wrapper.find(DuplicateDashboardForm);
+ const findAlert = () => wrapper.findComponent(GlAlert);
+ const findModal = () => wrapper.findComponent(GlModal);
+ const findDuplicateDashboardForm = () => wrapper.findComponent(DuplicateDashboardForm);
beforeEach(() => {
mockDashboards = dashboardGitResponse;
@@ -74,7 +74,7 @@ describe('duplicate dashboard modal', () => {
expect(okEvent.preventDefault).toHaveBeenCalled();
expect(wrapper.emitted().dashboardDuplicated).toBeTruthy();
expect(wrapper.emitted().dashboardDuplicated[0]).toEqual([dashboardGitResponse[0]]);
- expect(wrapper.find(GlLoadingIcon).exists()).toBe(false);
+ expect(wrapper.findComponent(GlLoadingIcon).exists()).toBe(false);
expect(wrapper.vm.$refs.duplicateDashboardModal.hide).toHaveBeenCalled();
expect(findAlert().exists()).toBe(false);
});
@@ -92,7 +92,7 @@ describe('duplicate dashboard modal', () => {
expect(findAlert().exists()).toBe(true);
expect(findAlert().text()).toBe(errMsg);
- expect(wrapper.find(GlLoadingIcon).exists()).toBe(false);
+ expect(wrapper.findComponent(GlLoadingIcon).exists()).toBe(false);
expect(wrapper.vm.$refs.duplicateDashboardModal.hide).not.toHaveBeenCalled();
});
@@ -102,7 +102,7 @@ describe('duplicate dashboard modal', () => {
commitMessage: 'A commit message',
};
- findModal().find(DuplicateDashboardForm).vm.$emit('change', formVals);
+ findModal().findComponent(DuplicateDashboardForm).vm.$emit('change', formVals);
// Binding's second argument contains the modal id
expect(wrapper.vm.form).toEqual(formVals);
diff --git a/spec/frontend/monitoring/components/embeds/embed_group_spec.js b/spec/frontend/monitoring/components/embeds/embed_group_spec.js
index 47366b345a8..6695353bdb5 100644
--- a/spec/frontend/monitoring/components/embeds/embed_group_spec.js
+++ b/spec/frontend/monitoring/components/embeds/embed_group_spec.js
@@ -58,14 +58,14 @@ describe('Embed Group', () => {
metricsWithDataGetter.mockReturnValue([]);
mountComponent();
- expect(wrapper.find(GlCard).isVisible()).toBe(false);
+ expect(wrapper.findComponent(GlCard).isVisible()).toBe(false);
});
it('shows the component when chart data is loaded', () => {
metricsWithDataGetter.mockReturnValue([1]);
mountComponent();
- expect(wrapper.find(GlCard).isVisible()).toBe(true);
+ expect(wrapper.findComponent(GlCard).isVisible()).toBe(true);
});
it('is expanded by default', () => {
@@ -79,7 +79,7 @@ describe('Embed Group', () => {
metricsWithDataGetter.mockReturnValue([1]);
mountComponent({ shallow: false, stubs: { MetricEmbed: true } });
- wrapper.find(GlButton).trigger('click');
+ wrapper.findComponent(GlButton).trigger('click');
await nextTick();
expect(wrapper.find('.gl-card-body').classes()).toContain('d-none');
@@ -93,11 +93,11 @@ describe('Embed Group', () => {
});
it('renders an Embed component', () => {
- expect(wrapper.find(MetricEmbed).exists()).toBe(true);
+ expect(wrapper.findComponent(MetricEmbed).exists()).toBe(true);
});
it('passes the correct props to the Embed component', () => {
- expect(wrapper.find(MetricEmbed).props()).toEqual(singleEmbedProps());
+ expect(wrapper.findComponent(MetricEmbed).props()).toEqual(singleEmbedProps());
});
it('adds the monitoring dashboard module', () => {
@@ -112,7 +112,7 @@ describe('Embed Group', () => {
});
it('passes the correct props to the dashboard Embed component', () => {
- expect(wrapper.find(MetricEmbed).props()).toEqual(dashboardEmbedProps());
+ expect(wrapper.findComponent(MetricEmbed).props()).toEqual(dashboardEmbedProps());
});
it('adds the monitoring dashboard module', () => {
@@ -127,11 +127,11 @@ describe('Embed Group', () => {
});
it('creates Embed components', () => {
- expect(wrapper.findAll(MetricEmbed)).toHaveLength(2);
+ expect(wrapper.findAllComponents(MetricEmbed)).toHaveLength(2);
});
it('passes the correct props to the Embed components', () => {
- expect(wrapper.findAll(MetricEmbed).wrappers.map((item) => item.props())).toEqual(
+ expect(wrapper.findAllComponents(MetricEmbed).wrappers.map((item) => item.props())).toEqual(
multipleEmbedProps(),
);
});
@@ -147,14 +147,14 @@ describe('Embed Group', () => {
metricsWithDataGetter.mockReturnValue([1]);
mountComponent({ shallow: false, stubs: { MetricEmbed: true } });
- expect(wrapper.find(GlButton).text()).toBe('Hide chart');
+ expect(wrapper.findComponent(GlButton).text()).toBe('Hide chart');
});
it('has a plural label when there are multiple embeds', () => {
metricsWithDataGetter.mockReturnValue([2]);
mountComponent({ shallow: false, stubs: { MetricEmbed: true } });
- expect(wrapper.find(GlButton).text()).toBe('Hide charts');
+ expect(wrapper.findComponent(GlButton).text()).toBe('Hide charts');
});
});
});
diff --git a/spec/frontend/monitoring/components/embeds/metric_embed_spec.js b/spec/frontend/monitoring/components/embeds/metric_embed_spec.js
index f9f1be4f277..beff3da2baf 100644
--- a/spec/frontend/monitoring/components/embeds/metric_embed_spec.js
+++ b/spec/frontend/monitoring/components/embeds/metric_embed_spec.js
@@ -64,7 +64,7 @@ describe('MetricEmbed', () => {
it('shows an empty state when no metrics are present', () => {
expect(wrapper.find('.metrics-embed').exists()).toBe(true);
- expect(wrapper.find(DashboardPanel).exists()).toBe(false);
+ expect(wrapper.findComponent(DashboardPanel).exists()).toBe(false);
});
});
@@ -92,12 +92,12 @@ describe('MetricEmbed', () => {
it('shows a chart when metrics are present', () => {
expect(wrapper.find('.metrics-embed').exists()).toBe(true);
- expect(wrapper.find(DashboardPanel).exists()).toBe(true);
- expect(wrapper.findAll(DashboardPanel).length).toBe(2);
+ expect(wrapper.findComponent(DashboardPanel).exists()).toBe(true);
+ expect(wrapper.findAllComponents(DashboardPanel).length).toBe(2);
});
it('includes groupId with dashboardUrl', () => {
- expect(wrapper.find(DashboardPanel).props('groupId')).toBe(TEST_HOST);
+ expect(wrapper.findComponent(DashboardPanel).props('groupId')).toBe(TEST_HOST);
});
});
});
diff --git a/spec/frontend/monitoring/components/empty_state_spec.js b/spec/frontend/monitoring/components/empty_state_spec.js
index 1ecb101574b..ddefa8c5cd0 100644
--- a/spec/frontend/monitoring/components/empty_state_spec.js
+++ b/spec/frontend/monitoring/components/empty_state_spec.js
@@ -25,8 +25,8 @@ describe('EmptyState', () => {
selectedState: dashboardEmptyStates.LOADING,
});
- expect(wrapper.find(GlLoadingIcon).exists()).toBe(true);
- expect(wrapper.find(GlEmptyState).exists()).toBe(false);
+ expect(wrapper.findComponent(GlLoadingIcon).exists()).toBe(true);
+ expect(wrapper.findComponent(GlEmptyState).exists()).toBe(false);
});
it('shows gettingStarted state', () => {
diff --git a/spec/frontend/monitoring/components/graph_group_spec.js b/spec/frontend/monitoring/components/graph_group_spec.js
index 31f52f6627b..104263e73e0 100644
--- a/spec/frontend/monitoring/components/graph_group_spec.js
+++ b/spec/frontend/monitoring/components/graph_group_spec.js
@@ -6,10 +6,10 @@ import GraphGroup from '~/monitoring/components/graph_group.vue';
describe('Graph group component', () => {
let wrapper;
- const findGroup = () => wrapper.find({ ref: 'graph-group' });
- const findContent = () => wrapper.find({ ref: 'graph-group-content' });
- const findLoadingIcon = () => wrapper.find(GlLoadingIcon);
- const findCaretIcon = () => wrapper.find(GlIcon);
+ const findGroup = () => wrapper.findComponent({ ref: 'graph-group' });
+ const findContent = () => wrapper.findComponent({ ref: 'graph-group-content' });
+ const findLoadingIcon = () => wrapper.findComponent(GlLoadingIcon);
+ const findCaretIcon = () => wrapper.findComponent(GlIcon);
const findToggleButton = () => wrapper.find('[data-testid="group-toggle-button"]');
const createComponent = (propsData) => {
diff --git a/spec/frontend/monitoring/components/group_empty_state_spec.js b/spec/frontend/monitoring/components/group_empty_state_spec.js
index 1dd2ed4e141..e3cd26b0e48 100644
--- a/spec/frontend/monitoring/components/group_empty_state_spec.js
+++ b/spec/frontend/monitoring/components/group_empty_state_spec.js
@@ -45,7 +45,7 @@ describe('GroupEmptyState', () => {
});
it('passes the expected props to GlEmptyState', () => {
- expect(wrapper.find(GlEmptyState).props()).toMatchSnapshot();
+ expect(wrapper.findComponent(GlEmptyState).props()).toMatchSnapshot();
});
});
});
diff --git a/spec/frontend/monitoring/components/links_section_spec.js b/spec/frontend/monitoring/components/links_section_spec.js
index c9b5aeeecb8..94938e7f459 100644
--- a/spec/frontend/monitoring/components/links_section_spec.js
+++ b/spec/frontend/monitoring/components/links_section_spec.js
@@ -21,7 +21,7 @@ describe('Links Section component', () => {
links,
};
};
- const findLinks = () => wrapper.findAll(GlLink);
+ const findLinks = () => wrapper.findAllComponents(GlLink);
beforeEach(() => {
store = createStore();
diff --git a/spec/frontend/monitoring/components/refresh_button_spec.js b/spec/frontend/monitoring/components/refresh_button_spec.js
index 0e45cc021c5..e00736954a9 100644
--- a/spec/frontend/monitoring/components/refresh_button_spec.js
+++ b/spec/frontend/monitoring/components/refresh_button_spec.js
@@ -15,9 +15,9 @@ describe('RefreshButton', () => {
wrapper = shallowMount(RefreshButton, { store, ...options });
};
- const findRefreshBtn = () => wrapper.find(GlButton);
- const findDropdown = () => wrapper.find(GlDropdown);
- const findOptions = () => findDropdown().findAll(GlDropdownItem);
+ const findRefreshBtn = () => wrapper.findComponent(GlButton);
+ const findDropdown = () => wrapper.findComponent(GlDropdown);
+ const findOptions = () => findDropdown().findAllComponents(GlDropdownItem);
const findOptionAt = (index) => findOptions().at(index);
const expectFetchDataToHaveBeenCalledTimes = (times) => {
diff --git a/spec/frontend/monitoring/components/variables/dropdown_field_spec.js b/spec/frontend/monitoring/components/variables/dropdown_field_spec.js
index 643bbb39f04..012e2e9c3e2 100644
--- a/spec/frontend/monitoring/components/variables/dropdown_field_spec.js
+++ b/spec/frontend/monitoring/components/variables/dropdown_field_spec.js
@@ -27,8 +27,8 @@ describe('Custom variable component', () => {
});
};
- const findDropdown = () => wrapper.find(GlDropdown);
- const findDropdownItems = () => wrapper.findAll(GlDropdownItem);
+ const findDropdown = () => wrapper.findComponent(GlDropdown);
+ const findDropdownItems = () => wrapper.findAllComponents(GlDropdownItem);
it('renders dropdown element when all necessary props are passed', () => {
createShallowWrapper();
diff --git a/spec/frontend/monitoring/components/variables_section_spec.js b/spec/frontend/monitoring/components/variables_section_spec.js
index 64b93bd3027..d6f8aac99aa 100644
--- a/spec/frontend/monitoring/components/variables_section_spec.js
+++ b/spec/frontend/monitoring/components/variables_section_spec.js
@@ -24,8 +24,8 @@ describe('Metrics dashboard/variables section component', () => {
});
};
- const findTextInputs = () => wrapper.findAll(TextField);
- const findCustomInputs = () => wrapper.findAll(DropdownField);
+ const findTextInputs = () => wrapper.findAllComponents(TextField);
+ const findCustomInputs = () => wrapper.findAllComponents(DropdownField);
beforeEach(() => {
store = createStore();
diff --git a/spec/frontend/monitoring/pages/panel_new_page_spec.js b/spec/frontend/monitoring/pages/panel_new_page_spec.js
index c89cbc52bcb..fa112fca2db 100644
--- a/spec/frontend/monitoring/pages/panel_new_page_spec.js
+++ b/spec/frontend/monitoring/pages/panel_new_page_spec.js
@@ -41,8 +41,8 @@ describe('monitoring/pages/panel_new_page', () => {
});
};
- const findBackButton = () => wrapper.find(GlButtonStub);
- const findPanelBuilder = () => wrapper.find(DashboardPanelBuilder);
+ const findBackButton = () => wrapper.findComponent(GlButtonStub);
+ const findPanelBuilder = () => wrapper.findComponent(DashboardPanelBuilder);
beforeEach(() => {
store = createStore();
diff --git a/spec/frontend/monitoring/router_spec.js b/spec/frontend/monitoring/router_spec.js
index 7758dd351b7..368bd955fb3 100644
--- a/spec/frontend/monitoring/router_spec.js
+++ b/spec/frontend/monitoring/router_spec.js
@@ -61,8 +61,8 @@ describe('Monitoring router', () => {
currentDashboard,
});
- expect(wrapper.find(DashboardPage).exists()).toBe(true);
- expect(wrapper.find(DashboardPage).find(Dashboard).exists()).toBe(true);
+ expect(wrapper.findComponent(DashboardPage).exists()).toBe(true);
+ expect(wrapper.findComponent(DashboardPage).findComponent(Dashboard).exists()).toBe(true);
});
});
@@ -84,8 +84,8 @@ describe('Monitoring router', () => {
currentDashboard,
});
- expect(wrapper.find(DashboardPage).exists()).toBe(true);
- expect(wrapper.find(DashboardPage).find(Dashboard).exists()).toBe(true);
+ expect(wrapper.findComponent(DashboardPage).exists()).toBe(true);
+ expect(wrapper.findComponent(DashboardPage).findComponent(Dashboard).exists()).toBe(true);
});
});
@@ -100,7 +100,7 @@ describe('Monitoring router', () => {
const wrapper = createWrapper(BASE_PATH, path);
expect(wrapper.vm.$route.params.dashboard).toBe(currentDashboard);
- expect(wrapper.find(PanelNewPage).exists()).toBe(true);
+ expect(wrapper.findComponent(PanelNewPage).exists()).toBe(true);
});
});
});
diff --git a/spec/frontend/monitoring/store/utils_spec.js b/spec/frontend/monitoring/store/utils_spec.js
index c25de8caa95..54f9c59308e 100644
--- a/spec/frontend/monitoring/store/utils_spec.js
+++ b/spec/frontend/monitoring/store/utils_spec.js
@@ -511,10 +511,10 @@ describe('mapToDashboardViewModel', () => {
describe('uniqMetricsId', () => {
[
{ input: { id: 1 }, expected: `${NOT_IN_DB_PREFIX}_1` },
- { input: { metric_id: 2 }, expected: '2_undefined' },
- { input: { metric_id: 2, id: 21 }, expected: '2_21' },
- { input: { metric_id: 22, id: 1 }, expected: '22_1' },
- { input: { metric_id: 'aaa', id: '_a' }, expected: 'aaa__a' },
+ { input: { metricId: 2 }, expected: '2_undefined' },
+ { input: { metricId: 2, id: 21 }, expected: '2_21' },
+ { input: { metricId: 22, id: 1 }, expected: '22_1' },
+ { input: { metricId: 'aaa', id: '_a' }, expected: 'aaa__a' },
].forEach(({ input, expected }) => {
it(`creates unique metric ID with ${JSON.stringify(input)}`, () => {
expect(uniqMetricsId(input)).toEqual(expected);
diff --git a/spec/frontend/nav/components/top_nav_dropdown_menu_spec.js b/spec/frontend/nav/components/top_nav_dropdown_menu_spec.js
index 70df05a2781..6cfbdb16111 100644
--- a/spec/frontend/nav/components/top_nav_dropdown_menu_spec.js
+++ b/spec/frontend/nav/components/top_nav_dropdown_menu_spec.js
@@ -124,7 +124,7 @@ describe('~/nav/components/top_nav_dropdown_menu.vue', () => {
});
it('clicked on link with view', () => {
- expect(primaryLink.props('menuItem').view).toBeTruthy();
+ expect(primaryLink.props('menuItem').view).toBe(TEST_NAV_DATA.views.projects.namespace);
});
it('changes active view', () => {
diff --git a/spec/frontend/notes/components/comment_field_layout_spec.js b/spec/frontend/notes/components/comment_field_layout_spec.js
index d69c2c4adfa..6662492fd81 100644
--- a/spec/frontend/notes/components/comment_field_layout_spec.js
+++ b/spec/frontend/notes/components/comment_field_layout_spec.js
@@ -22,8 +22,8 @@ describe('Comment Field Layout Component', () => {
confidential_issues_docs_path: CONFIDENTIAL_ISSUES_DOCS_PATH,
};
- const findIssuableNoteWarning = () => wrapper.find(NoteableWarning);
- const findEmailParticipantsWarning = () => wrapper.find(EmailParticipantsWarning);
+ const findIssuableNoteWarning = () => wrapper.findComponent(NoteableWarning);
+ const findEmailParticipantsWarning = () => wrapper.findComponent(EmailParticipantsWarning);
const findErrorAlert = () => wrapper.findByTestId('comment-field-alert-container');
const createWrapper = (props = {}, slots = {}) => {
diff --git a/spec/frontend/notes/components/diff_discussion_header_spec.js b/spec/frontend/notes/components/diff_discussion_header_spec.js
index 7878737fd31..5800f68b114 100644
--- a/spec/frontend/notes/components/diff_discussion_header_spec.js
+++ b/spec/frontend/notes/components/diff_discussion_header_spec.js
@@ -1,6 +1,7 @@
-import { mount } from '@vue/test-utils';
+import { shallowMount } from '@vue/test-utils';
import { nextTick } from 'vue';
+import { GlAvatar, GlAvatarLink } from '@gitlab/ui';
import diffDiscussionHeader from '~/notes/components/diff_discussion_header.vue';
import createStore from '~/notes/stores';
@@ -15,7 +16,7 @@ describe('diff_discussion_header component', () => {
window.mrTabs = {};
store = createStore();
- wrapper = mount(diffDiscussionHeader, {
+ wrapper = shallowMount(diffDiscussionHeader, {
store,
propsData: { discussion: discussionMock },
});
@@ -25,15 +26,25 @@ describe('diff_discussion_header component', () => {
wrapper.destroy();
});
- it('should render user avatar', async () => {
- const discussion = { ...discussionMock };
- discussion.diff_file = mockDiffFile;
- discussion.diff_discussion = true;
+ describe('Avatar', () => {
+ const firstNoteAuthor = discussionMock.notes[0].author;
+ const findAvatarLink = () => wrapper.findComponent(GlAvatarLink);
+ const findAvatar = () => wrapper.findComponent(GlAvatar);
- wrapper.setProps({ discussion });
+ it('should render user avatar and user avatar link', () => {
+ expect(findAvatar().exists()).toBe(true);
+ expect(findAvatarLink().exists()).toBe(true);
+ });
+
+ it('renders avatar of the first note author', () => {
+ const props = findAvatar().props();
- await nextTick();
- expect(wrapper.find('.user-avatar-link').exists()).toBe(true);
+ expect(props).toMatchObject({
+ src: firstNoteAuthor.avatar_url,
+ alt: firstNoteAuthor.name,
+ size: { default: 24, md: 32 },
+ });
+ });
});
describe('action text', () => {
diff --git a/spec/frontend/notes/components/discussion_actions_spec.js b/spec/frontend/notes/components/discussion_actions_spec.js
index 925dbcc09ec..d16c13d6fd3 100644
--- a/spec/frontend/notes/components/discussion_actions_spec.js
+++ b/spec/frontend/notes/components/discussion_actions_spec.js
@@ -47,9 +47,9 @@ describe('DiscussionActions', () => {
it('renders reply placeholder, resolve discussion button, resolve with issue button and jump to next discussion button', () => {
createComponent();
- expect(wrapper.find(ReplyPlaceholder).exists()).toBe(true);
- expect(wrapper.find(ResolveDiscussionButton).exists()).toBe(true);
- expect(wrapper.find(ResolveWithIssueButton).exists()).toBe(true);
+ expect(wrapper.findComponent(ReplyPlaceholder).exists()).toBe(true);
+ expect(wrapper.findComponent(ResolveDiscussionButton).exists()).toBe(true);
+ expect(wrapper.findComponent(ResolveWithIssueButton).exists()).toBe(true);
});
it('only renders reply placholder if disccusion is not resolvable', () => {
@@ -57,15 +57,15 @@ describe('DiscussionActions', () => {
discussion.resolvable = false;
createComponent({ discussion });
- expect(wrapper.find(ReplyPlaceholder).exists()).toBe(true);
- expect(wrapper.find(ResolveDiscussionButton).exists()).toBe(false);
- expect(wrapper.find(ResolveWithIssueButton).exists()).toBe(false);
+ expect(wrapper.findComponent(ReplyPlaceholder).exists()).toBe(true);
+ expect(wrapper.findComponent(ResolveDiscussionButton).exists()).toBe(false);
+ expect(wrapper.findComponent(ResolveWithIssueButton).exists()).toBe(false);
});
it('does not render resolve with issue button if resolveWithIssuePath is falsy', () => {
createComponent({ resolveWithIssuePath: '' });
- expect(wrapper.find(ResolveWithIssueButton).exists()).toBe(false);
+ expect(wrapper.findComponent(ResolveWithIssueButton).exists()).toBe(false);
});
describe.each`
@@ -82,8 +82,8 @@ describe('DiscussionActions', () => {
});
it(shouldRender ? 'renders resolve buttons' : 'does not render resolve buttons', () => {
- expect(wrapper.find(ResolveDiscussionButton).exists()).toBe(shouldRender);
- expect(wrapper.find(ResolveWithIssueButton).exists()).toBe(shouldRender);
+ expect(wrapper.findComponent(ResolveDiscussionButton).exists()).toBe(shouldRender);
+ expect(wrapper.findComponent(ResolveWithIssueButton).exists()).toBe(shouldRender);
});
});
});
@@ -95,7 +95,7 @@ describe('DiscussionActions', () => {
createComponent({}, { attachTo: document.body });
jest.spyOn(wrapper.vm, '$emit');
- wrapper.find(ReplyPlaceholder).find('textarea').trigger('focus');
+ wrapper.findComponent(ReplyPlaceholder).find('textarea').trigger('focus');
expect(wrapper.vm.$emit).toHaveBeenCalledWith('showReplyForm');
});
@@ -103,7 +103,7 @@ describe('DiscussionActions', () => {
createComponent();
jest.spyOn(wrapper.vm, '$emit');
- wrapper.find(ResolveDiscussionButton).find('button').trigger('click');
+ wrapper.findComponent(ResolveDiscussionButton).find('button').trigger('click');
expect(wrapper.vm.$emit).toHaveBeenCalledWith('resolve');
});
});
diff --git a/spec/frontend/notes/components/discussion_counter_spec.js b/spec/frontend/notes/components/discussion_counter_spec.js
index f016cef18e6..a7e2f1efa09 100644
--- a/spec/frontend/notes/components/discussion_counter_spec.js
+++ b/spec/frontend/notes/components/discussion_counter_spec.js
@@ -47,7 +47,7 @@ describe('DiscussionCounter component', () => {
it('does not render', () => {
wrapper = shallowMount(DiscussionCounter, { store, propsData: { blocksMerge: true } });
- expect(wrapper.find({ ref: 'discussionCounter' }).exists()).toBe(false);
+ expect(wrapper.findComponent({ ref: 'discussionCounter' }).exists()).toBe(false);
});
});
@@ -57,7 +57,7 @@ describe('DiscussionCounter component', () => {
store.dispatch('updateResolvableDiscussionsCounts');
wrapper = shallowMount(DiscussionCounter, { store, propsData: { blocksMerge: true } });
- expect(wrapper.find({ ref: 'discussionCounter' }).exists()).toBe(false);
+ expect(wrapper.findComponent({ ref: 'discussionCounter' }).exists()).toBe(false);
});
});
@@ -77,7 +77,7 @@ describe('DiscussionCounter component', () => {
updateStore();
wrapper = shallowMount(DiscussionCounter, { store, propsData: { blocksMerge: true } });
- expect(wrapper.find({ ref: 'discussionCounter' }).exists()).toBe(true);
+ expect(wrapper.findComponent({ ref: 'discussionCounter' }).exists()).toBe(true);
});
it.each`
@@ -103,7 +103,7 @@ describe('DiscussionCounter component', () => {
updateStore({ resolvable: true, resolved });
wrapper = shallowMount(DiscussionCounter, { store, propsData: { blocksMerge: true } });
- expect(wrapper.findAll(GlButton)).toHaveLength(groupLength);
+ expect(wrapper.findAllComponents(GlButton)).toHaveLength(groupLength);
});
});
diff --git a/spec/frontend/notes/components/discussion_filter_note_spec.js b/spec/frontend/notes/components/discussion_filter_note_spec.js
index ad9a2e898eb..48f5030aa1a 100644
--- a/spec/frontend/notes/components/discussion_filter_note_spec.js
+++ b/spec/frontend/notes/components/discussion_filter_note_spec.js
@@ -31,14 +31,14 @@ describe('DiscussionFilterNote component', () => {
it('emits `dropdownSelect` event with 0 parameter on clicking Show all activity button', () => {
jest.spyOn(eventHub, '$emit').mockImplementation(() => {});
- wrapper.findAll(GlButton).at(0).vm.$emit('click');
+ wrapper.findAllComponents(GlButton).at(0).vm.$emit('click');
expect(eventHub.$emit).toHaveBeenCalledWith('dropdownSelect', 0);
});
it('emits `dropdownSelect` event with 1 parameter on clicking Show comments only button', () => {
jest.spyOn(eventHub, '$emit').mockImplementation(() => {});
- wrapper.findAll(GlButton).at(1).vm.$emit('click');
+ wrapper.findAllComponents(GlButton).at(1).vm.$emit('click');
expect(eventHub.$emit).toHaveBeenCalledWith('dropdownSelect', 1);
});
diff --git a/spec/frontend/notes/components/discussion_notes_spec.js b/spec/frontend/notes/components/discussion_notes_spec.js
index 3506b6ac9f3..1b8b6bec490 100644
--- a/spec/frontend/notes/components/discussion_notes_spec.js
+++ b/spec/frontend/notes/components/discussion_notes_spec.js
@@ -61,13 +61,13 @@ describe('DiscussionNotes', () => {
it('renders an element for each note in the discussion', () => {
createComponent();
const notesCount = discussionMock.notes.length;
- const els = wrapper.findAll(NoteableNote);
+ const els = wrapper.findAllComponents(NoteableNote);
expect(els.length).toBe(notesCount);
});
it('renders one element if replies groupping is enabled', () => {
createComponent({ shouldGroupReplies: true });
- const els = wrapper.findAll(NoteableNote);
+ const els = wrapper.findAllComponents(NoteableNote);
expect(els.length).toBe(1);
});
diff --git a/spec/frontend/notes/components/discussion_reply_placeholder_spec.js b/spec/frontend/notes/components/discussion_reply_placeholder_spec.js
index 3932f818c4e..971e3987929 100644
--- a/spec/frontend/notes/components/discussion_reply_placeholder_spec.js
+++ b/spec/frontend/notes/components/discussion_reply_placeholder_spec.js
@@ -15,7 +15,7 @@ describe('ReplyPlaceholder', () => {
});
};
- const findTextarea = () => wrapper.find({ ref: 'textarea' });
+ const findTextarea = () => wrapper.findComponent({ ref: 'textarea' });
afterEach(() => {
wrapper.destroy();
diff --git a/spec/frontend/notes/components/discussion_resolve_button_spec.js b/spec/frontend/notes/components/discussion_resolve_button_spec.js
index ca0c0ca6de8..17c3523cf48 100644
--- a/spec/frontend/notes/components/discussion_resolve_button_spec.js
+++ b/spec/frontend/notes/components/discussion_resolve_button_spec.js
@@ -28,7 +28,7 @@ describe('resolveDiscussionButton', () => {
});
it('should emit a onClick event on button click', async () => {
- const button = wrapper.find(GlButton);
+ const button = wrapper.findComponent(GlButton);
button.vm.$emit('click');
@@ -39,7 +39,7 @@ describe('resolveDiscussionButton', () => {
});
it('should contain the provided button title', () => {
- const button = wrapper.find(GlButton);
+ const button = wrapper.findComponent(GlButton);
expect(button.text()).toContain(buttonTitle);
});
@@ -52,7 +52,7 @@ describe('resolveDiscussionButton', () => {
},
});
- const button = wrapper.find(GlButton);
+ const button = wrapper.findComponent(GlButton);
expect(button.props('loading')).toEqual(true);
});
@@ -65,7 +65,7 @@ describe('resolveDiscussionButton', () => {
},
});
- const button = wrapper.find(GlButton);
+ const button = wrapper.findComponent(GlButton);
await nextTick();
expect(button.props('loading')).toEqual(false);
diff --git a/spec/frontend/notes/components/discussion_resolve_with_issue_button_spec.js b/spec/frontend/notes/components/discussion_resolve_with_issue_button_spec.js
index 5bc6282db03..71406eeb7b4 100644
--- a/spec/frontend/notes/components/discussion_resolve_with_issue_button_spec.js
+++ b/spec/frontend/notes/components/discussion_resolve_with_issue_button_spec.js
@@ -20,7 +20,7 @@ describe('ResolveWithIssueButton', () => {
});
it('should have a link with the provided link property as href', () => {
- const button = wrapper.find(GlButton);
+ const button = wrapper.findComponent(GlButton);
expect(button.attributes().href).toBe(url);
});
diff --git a/spec/frontend/notes/components/note_actions/reply_button_spec.js b/spec/frontend/notes/components/note_actions/reply_button_spec.js
index 4993ded365d..20b32b8c178 100644
--- a/spec/frontend/notes/components/note_actions/reply_button_spec.js
+++ b/spec/frontend/notes/components/note_actions/reply_button_spec.js
@@ -15,7 +15,7 @@ describe('ReplyButton', () => {
});
it('emits startReplying on click', () => {
- wrapper.find(GlButton).vm.$emit('click');
+ wrapper.findComponent(GlButton).vm.$emit('click');
expect(wrapper.emitted('startReplying')).toEqual([[]]);
});
diff --git a/spec/frontend/notes/components/note_actions_spec.js b/spec/frontend/notes/components/note_actions_spec.js
index bf5a6b4966a..cbe11c20798 100644
--- a/spec/frontend/notes/components/note_actions_spec.js
+++ b/spec/frontend/notes/components/note_actions_spec.js
@@ -16,7 +16,7 @@ describe('noteActions', () => {
let actions;
let axiosMock;
- const findUserAccessRoleBadge = (idx) => wrapper.findAll(UserAccessRoleBadge).at(idx);
+ const findUserAccessRoleBadge = (idx) => wrapper.findAllComponents(UserAccessRoleBadge).at(idx);
const findUserAccessRoleBadgeText = (idx) => findUserAccessRoleBadge(idx).text().trim();
const mountNoteActions = (propsData, computed) => {
@@ -159,7 +159,7 @@ describe('noteActions', () => {
});
});
- describe('when a user has access to edit an issue', () => {
+ describe('when a user can set metadata of an issue', () => {
const testButtonClickTriggersAction = () => {
axiosMock.onPut(`${TEST_HOST}/api/v4/projects/group/project/issues/1`).reply(() => {
expect(actions.updateAssignees).toHaveBeenCalled();
@@ -176,7 +176,7 @@ describe('noteActions', () => {
});
store.state.noteableData = {
current_user: {
- can_update: true,
+ can_set_issue_metadata: true,
},
};
store.state.userData = userDataMock;
@@ -191,6 +191,31 @@ describe('noteActions', () => {
it('should be possible to unassign the comment author', testButtonClickTriggersAction);
});
+ describe('when a user can update but not set metadata of an issue', () => {
+ beforeEach(() => {
+ wrapper = mountNoteActions(props, {
+ targetType: () => 'issue',
+ });
+ store.state.noteableData = {
+ current_user: {
+ can_update: true,
+ can_set_issue_metadata: false,
+ },
+ };
+ store.state.userData = userDataMock;
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ axiosMock.restore();
+ });
+
+ it('should not be possible to assign or unassign the comment author', () => {
+ const assignUserButton = wrapper.find('[data-testid="assign-user"]');
+ expect(assignUserButton.exists()).toBe(false);
+ });
+ });
+
describe('when a user does not have access to edit an issue', () => {
const testButtonDoesNotRender = () => {
const assignUserButton = wrapper.find('[data-testid="assign-user"]');
@@ -241,7 +266,7 @@ describe('noteActions', () => {
});
it('shows a reply button', () => {
- const replyButton = wrapper.find({ ref: 'replyButton' });
+ const replyButton = wrapper.findComponent({ ref: 'replyButton' });
expect(replyButton.exists()).toBe(true);
});
@@ -256,7 +281,7 @@ describe('noteActions', () => {
});
it('does not show a reply button', () => {
- const replyButton = wrapper.find({ ref: 'replyButton' });
+ const replyButton = wrapper.findComponent({ ref: 'replyButton' });
expect(replyButton.exists()).toBe(false);
});
@@ -270,7 +295,7 @@ describe('noteActions', () => {
});
it('should render the right resolve button title', () => {
- const resolveButton = wrapper.find({ ref: 'resolveButton' });
+ const resolveButton = wrapper.findComponent({ ref: 'resolveButton' });
expect(resolveButton.exists()).toBe(true);
expect(resolveButton.attributes('title')).toBe('Thread stays unresolved');
diff --git a/spec/frontend/notes/components/note_attachment_spec.js b/spec/frontend/notes/components/note_attachment_spec.js
index d47c2beaaf8..24632f8e427 100644
--- a/spec/frontend/notes/components/note_attachment_spec.js
+++ b/spec/frontend/notes/components/note_attachment_spec.js
@@ -4,8 +4,8 @@ import NoteAttachment from '~/notes/components/note_attachment.vue';
describe('Issue note attachment', () => {
let wrapper;
- const findImage = () => wrapper.find({ ref: 'attachmentImage' });
- const findUrl = () => wrapper.find({ ref: 'attachmentUrl' });
+ const findImage = () => wrapper.findComponent({ ref: 'attachmentImage' });
+ const findUrl = () => wrapper.findComponent({ ref: 'attachmentUrl' });
const createComponent = (attachment) => {
wrapper = shallowMount(NoteAttachment, {
diff --git a/spec/frontend/notes/components/note_body_spec.js b/spec/frontend/notes/components/note_body_spec.js
index 0f765a8da87..c2e56d3e7a7 100644
--- a/spec/frontend/notes/components/note_body_spec.js
+++ b/spec/frontend/notes/components/note_body_spec.js
@@ -7,7 +7,6 @@ import NoteAwardsList from '~/notes/components/note_awards_list.vue';
import NoteForm from '~/notes/components/note_form.vue';
import createStore from '~/notes/stores';
import notes from '~/notes/stores/modules/index';
-import { INTERNAL_NOTE_CLASSES } from '~/notes/constants';
import Suggestions from '~/vue_shared/components/markdown/suggestions.vue';
@@ -59,22 +58,10 @@ describe('issue_note_body component', () => {
expect(wrapper.findComponent(NoteAwardsList).exists()).toBe(true);
});
- it('should not have internal note classes', () => {
- expect(wrapper.findByTestId('note-internal-container').classes()).not.toEqual(
- INTERNAL_NOTE_CLASSES,
- );
- });
-
describe('isInternalNote', () => {
beforeEach(() => {
wrapper = createComponent({ props: { isInternalNote: true } });
});
-
- it('should have internal note classes', () => {
- expect(wrapper.findByTestId('note-internal-container').classes()).toEqual(
- INTERNAL_NOTE_CLASSES,
- );
- });
});
describe('isEditing', () => {
@@ -110,12 +97,6 @@ describe('issue_note_body component', () => {
beforeEach(() => {
wrapper.setProps({ isInternalNote: true });
});
-
- it('should not have internal note classes', () => {
- expect(wrapper.findByTestId('note-internal-container').classes()).not.toEqual(
- INTERNAL_NOTE_CLASSES,
- );
- });
});
});
@@ -162,7 +143,7 @@ describe('issue_note_body component', () => {
});
it('passes the correct default placeholder commit message for a suggestion to the suggestions component', () => {
- const commitMessage = wrapper.find(Suggestions).attributes('defaultcommitmessage');
+ const commitMessage = wrapper.findComponent(Suggestions).attributes('defaultcommitmessage');
expect(commitMessage).toBe('branch/pathnameuseruser usertonabc11');
});
diff --git a/spec/frontend/notes/components/note_form_spec.js b/spec/frontend/notes/components/note_form_spec.js
index 252c24d1117..fad04e9063d 100644
--- a/spec/frontend/notes/components/note_form_spec.js
+++ b/spec/frontend/notes/components/note_form_spec.js
@@ -6,6 +6,7 @@ import { getDraft, updateDraft } from '~/lib/utils/autosave';
import NoteForm from '~/notes/components/note_form.vue';
import createStore from '~/notes/stores';
import MarkdownField from '~/vue_shared/components/markdown/field.vue';
+import { AT_WHO_ACTIVE_CLASS } from '~/gfm_auto_complete';
import { noteableDataMock, notesDataMock, discussionMock, note } from '../mock_data';
jest.mock('~/lib/utils/autosave');
@@ -91,7 +92,7 @@ describe('issue_note_form component', () => {
expect(conflictWarning.exists()).toBe(true);
expect(conflictWarning.text().replace(/\s+/g, ' ').trim()).toBe(message);
- expect(conflictWarning.find(GlLink).attributes('href')).toBe('#note_545');
+ expect(conflictWarning.findComponent(GlLink).attributes('href')).toBe('#note_545');
});
});
@@ -133,7 +134,7 @@ describe('issue_note_form component', () => {
it('should link to markdown docs', () => {
const { markdownDocsPath } = notesDataMock;
- const markdownField = wrapper.find(MarkdownField);
+ const markdownField = wrapper.findComponent(MarkdownField);
const markdownFieldProps = markdownField.props();
expect(markdownFieldProps.markdownDocsPath).toBe(markdownDocsPath);
@@ -201,6 +202,21 @@ describe('issue_note_form component', () => {
expect(wrapper.emitted().cancelForm).toHaveLength(1);
});
+ it('will not cancel form if there is an active at-who-active class', async () => {
+ wrapper.setProps({
+ ...props,
+ });
+ await nextTick();
+
+ const textareaEl = wrapper.vm.$refs.textarea;
+ const cancelButton = findCancelButton();
+ textareaEl.classList.add(AT_WHO_ACTIVE_CLASS);
+ cancelButton.vm.$emit('click');
+ await nextTick();
+
+ expect(wrapper.emitted().cancelForm).toBeUndefined();
+ });
+
it('should be possible to update the note', async () => {
wrapper.setProps({
...props,
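The new spec above simulates an open GFM autocomplete by adding AT_WHO_ACTIVE_CLASS to the textarea before clicking cancel, then asserts that cancelForm is never emitted. A plausible shape of the guard it exercises, assuming the cancel handler inspects the textarea's class list (an illustrative sketch, not the component's actual code):

// Hypothetical handler: when the at-who dropdown is open, Escape/cancel
// should dismiss the autocomplete rather than close the whole note form.
cancelHandler(shouldConfirm = false) {
  if (this.$refs.textarea.classList.contains(AT_WHO_ACTIVE_CLASS)) {
    return;
  }
  this.$emit('cancelForm', shouldConfirm); // argument list is illustrative
}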
diff --git a/spec/frontend/notes/components/note_header_spec.js b/spec/frontend/notes/components/note_header_spec.js
index ad2cf1c5a35..43fbc5e26dc 100644
--- a/spec/frontend/notes/components/note_header_spec.js
+++ b/spec/frontend/notes/components/note_header_spec.js
@@ -15,15 +15,15 @@ const actions = {
describe('NoteHeader component', () => {
let wrapper;
- const findActionsWrapper = () => wrapper.find({ ref: 'discussionActions' });
+ const findActionsWrapper = () => wrapper.findComponent({ ref: 'discussionActions' });
const findToggleThreadButton = () => wrapper.findByTestId('thread-toggle');
- const findChevronIcon = () => wrapper.find({ ref: 'chevronIcon' });
- const findActionText = () => wrapper.find({ ref: 'actionText' });
- const findTimestampLink = () => wrapper.find({ ref: 'noteTimestampLink' });
- const findTimestamp = () => wrapper.find({ ref: 'noteTimestamp' });
+ const findChevronIcon = () => wrapper.findComponent({ ref: 'chevronIcon' });
+ const findActionText = () => wrapper.findComponent({ ref: 'actionText' });
+ const findTimestampLink = () => wrapper.findComponent({ ref: 'noteTimestampLink' });
+ const findTimestamp = () => wrapper.findComponent({ ref: 'noteTimestamp' });
const findInternalNoteIndicator = () => wrapper.findByTestId('internalNoteIndicator');
- const findSpinner = () => wrapper.find({ ref: 'spinner' });
- const findAuthorStatus = () => wrapper.find({ ref: 'authorStatus' });
+ const findSpinner = () => wrapper.findComponent({ ref: 'spinner' });
+ const findAuthorStatus = () => wrapper.findComponent({ ref: 'authorStatus' });
const statusHtml =
'"<span class="user-status-emoji has-tooltip" title="foo bar" data-html="true" data-placement="top"><gl-emoji title="basketball and hoop" data-name="basketball" data-unicode-version="6.0">🏀</gl-emoji></span>"';
@@ -228,7 +228,7 @@ describe('NoteHeader component', () => {
const dispatchEvent = jest.spyOn(wrapper.vm.$refs.authorNameLink, 'dispatchEvent');
- wrapper.find({ ref: 'authorUsernameLink' }).trigger('mouseenter');
+ wrapper.findComponent({ ref: 'authorUsernameLink' }).trigger('mouseenter');
expect(dispatchEvent).toHaveBeenCalledWith(new Event('mouseenter'));
});
@@ -238,7 +238,7 @@ describe('NoteHeader component', () => {
const dispatchEvent = jest.spyOn(wrapper.vm.$refs.authorNameLink, 'dispatchEvent');
- wrapper.find({ ref: 'authorUsernameLink' }).trigger('mouseleave');
+ wrapper.findComponent({ ref: 'authorUsernameLink' }).trigger('mouseleave');
expect(dispatchEvent).toHaveBeenCalledWith(new Event('mouseleave'));
});
@@ -266,8 +266,8 @@ describe('NoteHeader component', () => {
it('toggles hover specific CSS classes on author name link', async () => {
createComponent({ author });
- const authorUsernameLink = wrapper.find({ ref: 'authorUsernameLink' });
- const authorNameLink = wrapper.find({ ref: 'authorNameLink' });
+ const authorUsernameLink = wrapper.findComponent({ ref: 'authorUsernameLink' });
+ const authorNameLink = wrapper.findComponent({ ref: 'authorNameLink' });
authorUsernameLink.trigger('mouseenter');
diff --git a/spec/frontend/notes/components/noteable_discussion_spec.js b/spec/frontend/notes/components/noteable_discussion_spec.js
index 603db56a098..b34305688d9 100644
--- a/spec/frontend/notes/components/noteable_discussion_spec.js
+++ b/spec/frontend/notes/components/noteable_discussion_spec.js
@@ -73,13 +73,13 @@ describe('noteable_discussion component', () => {
expect(wrapper.vm.isReplying).toEqual(false);
- const replyPlaceholder = wrapper.find(ReplyPlaceholder);
+ const replyPlaceholder = wrapper.findComponent(ReplyPlaceholder);
replyPlaceholder.vm.$emit('focus');
await nextTick();
expect(wrapper.vm.isReplying).toEqual(true);
- const noteForm = wrapper.find(NoteForm);
+ const noteForm = wrapper.findComponent(NoteForm);
expect(noteForm.exists()).toBe(true);
@@ -100,11 +100,11 @@ describe('noteable_discussion component', () => {
wrapper.setProps({ discussion: { ...discussionMock, confidential: isNoteInternal } });
await nextTick();
- const replyPlaceholder = wrapper.find(ReplyPlaceholder);
+ const replyPlaceholder = wrapper.findComponent(ReplyPlaceholder);
replyPlaceholder.vm.$emit('focus');
await nextTick();
- expect(wrapper.find(NoteForm).props('saveButtonTitle')).toBe(saveButtonTitle);
+ expect(wrapper.findComponent(NoteForm).props('saveButtonTitle')).toBe(saveButtonTitle);
},
);
@@ -116,7 +116,7 @@ describe('noteable_discussion component', () => {
await nextTick();
- wrapper.find(DiscussionNotes).vm.$emit('startReplying');
+ wrapper.findComponent(DiscussionNotes).vm.$emit('startReplying');
await nextTick();
@@ -139,7 +139,7 @@ describe('noteable_discussion component', () => {
});
it('does not display a button to resolve with issue', () => {
- const button = wrapper.find(ResolveWithIssueButton);
+ const button = wrapper.findComponent(ResolveWithIssueButton);
expect(button.exists()).toBe(false);
});
@@ -159,7 +159,7 @@ describe('noteable_discussion component', () => {
});
it('displays a button to resolve with issue', () => {
- const button = wrapper.find(ResolveWithIssueButton);
+ const button = wrapper.findComponent(ResolveWithIssueButton);
expect(button.exists()).toBe(true);
});
diff --git a/spec/frontend/notes/components/noteable_note_spec.js b/spec/frontend/notes/components/noteable_note_spec.js
index 3350609bb90..e049c5bc0c8 100644
--- a/spec/frontend/notes/components/noteable_note_spec.js
+++ b/spec/frontend/notes/components/noteable_note_spec.js
@@ -285,11 +285,25 @@ describe('issue_note', () => {
await waitForPromises();
expect(alertSpy).not.toHaveBeenCalled();
expect(wrapper.vm.note.note_html).toBe(
- '<p><img src="data:image/gif;base64,R0lGODlhAQABAIAAAAAAAP///yH5BAEAAAAALAAAAAABAAEAAAIBRAA7"></p>\n',
+ '<img src="data:image/gif;base64,R0lGODlhAQABAIAAAAAAAP///yH5BAEAAAAALAAAAAABAAEAAAIBRAA7">',
);
});
});
+ describe('internal note', () => {
+ it('has internal note class for internal notes', () => {
+ createWrapper({ note: { ...note, confidential: true } });
+
+ expect(wrapper.classes()).toContain('internal-note');
+ });
+
+ it('does not have internal note class for external notes', () => {
+ createWrapper({ note });
+
+ expect(wrapper.classes()).not.toContain('internal-note');
+ });
+ });
+
describe('cancel edit', () => {
beforeEach(() => {
createWrapper();
@@ -357,7 +371,7 @@ describe('issue_note', () => {
createWrapper();
updateActions();
wrapper.findComponent(NoteBody).vm.$emit('handleFormUpdate', params);
- expect(wrapper.emitted('handleUpdateNote')).toBeTruthy();
+ expect(wrapper.emitted('handleUpdateNote')).toHaveLength(1);
});
it('does not stringify empty position', () => {
diff --git a/spec/frontend/notes/components/notes_app_spec.js b/spec/frontend/notes/components/notes_app_spec.js
index 36a68118fa7..d4cb07d97dc 100644
--- a/spec/frontend/notes/components/notes_app_spec.js
+++ b/spec/frontend/notes/components/notes_app_spec.js
@@ -4,7 +4,6 @@ import $ from 'jquery';
import { nextTick } from 'vue';
import { setHTMLFixture, resetHTMLFixture } from 'helpers/fixtures';
import setWindowLocation from 'helpers/set_window_location_helper';
-import { setTestTimeout } from 'helpers/timeout';
import waitForPromises from 'helpers/wait_for_promises';
import DraftNote from '~/batch_comments/components/draft_note.vue';
import batchComments from '~/batch_comments/stores/modules/batch_comments';
@@ -19,8 +18,6 @@ import '~/behaviors/markdown/render_gfm';
import OrderedLayout from '~/vue_shared/components/ordered_layout.vue';
import * as mockData from '../mock_data';
-setTestTimeout(1000);
-
const TYPE_COMMENT_FORM = 'comment-form';
const TYPE_NOTES_LIST = 'notes-list';
@@ -359,7 +356,7 @@ describe('note_app', () => {
});
it('should listen hashchange event', () => {
- const notesApp = wrapper.find(NotesApp);
+ const notesApp = wrapper.findComponent(NotesApp);
const hash = 'some dummy hash';
jest.spyOn(urlUtility, 'getLocationHash').mockReturnValueOnce(hash);
const setTargetNoteHash = jest.spyOn(notesApp.vm, 'setTargetNoteHash');
@@ -439,7 +436,7 @@ describe('note_app', () => {
});
it('correctly finds only draft comments', () => {
- const drafts = wrapper.findAll(DraftNote).wrappers;
+ const drafts = wrapper.findAllComponents(DraftNote).wrappers;
expect(drafts.map((x) => x.props('draft'))).toEqual(
mockData.draftComments.map(({ note }) => expect.objectContaining({ note })),
diff --git a/spec/frontend/notes/components/sort_discussion_spec.js b/spec/frontend/notes/components/sort_discussion_spec.js
index bde27b7e5fc..8b6e05da3c0 100644
--- a/spec/frontend/notes/components/sort_discussion_spec.js
+++ b/spec/frontend/notes/components/sort_discussion_spec.js
@@ -21,7 +21,7 @@ describe('Sort Discussion component', () => {
});
};
- const findLocalStorageSync = () => wrapper.find(LocalStorageSync);
+ const findLocalStorageSync = () => wrapper.findComponent(LocalStorageSync);
beforeEach(() => {
store = createStore();
diff --git a/spec/frontend/notes/components/timeline_toggle_spec.js b/spec/frontend/notes/components/timeline_toggle_spec.js
index 84fa3008835..cf79416d300 100644
--- a/spec/frontend/notes/components/timeline_toggle_spec.js
+++ b/spec/frontend/notes/components/timeline_toggle_spec.js
@@ -27,7 +27,7 @@ describe('Timeline toggle', () => {
});
};
- const findGlButton = () => wrapper.find(GlButton);
+ const findGlButton = () => wrapper.findComponent(GlButton);
beforeEach(() => {
store = createStore();
diff --git a/spec/frontend/notes/deprecated_notes_spec.js b/spec/frontend/notes/deprecated_notes_spec.js
index 40b124b9029..d5e2a189afe 100644
--- a/spec/frontend/notes/deprecated_notes_spec.js
+++ b/spec/frontend/notes/deprecated_notes_spec.js
@@ -7,7 +7,6 @@ import { loadHTMLFixture, resetHTMLFixture } from 'helpers/fixtures';
import { createSpyObj } from 'helpers/jest_helpers';
import { TEST_HOST } from 'helpers/test_constants';
import waitForPromises from 'helpers/wait_for_promises';
-import { setTestTimeoutOnce } from 'helpers/timeout';
import axios from '~/lib/utils/axios_utils';
import * as urlUtility from '~/lib/utils/url_utility';
@@ -48,7 +47,6 @@ describe.skip('Old Notes (~/deprecated_notes.js)', () => {
// random failures.
// It seems that running tests in parallel increases failure rate.
jest.setTimeout(4000);
- setTestTimeoutOnce(4000);
});
afterEach(async () => {
@@ -510,7 +508,7 @@ describe.skip('Old Notes (~/deprecated_notes.js)', () => {
notes.putEditFormInPlace($el);
- expect(notes.glForm.enableGFM).toBeTruthy();
+ expect(notes.glForm.enableGFM).toBe('');
});
});
@@ -783,21 +781,21 @@ describe.skip('Old Notes (~/deprecated_notes.js)', () => {
const sampleComment = '/wip\n/milestone %1.0\n/merge\n/unassign Merging this';
const hasQuickActions = notes.hasQuickActions(sampleComment);
- expect(hasQuickActions).toBeTruthy();
+ expect(hasQuickActions).toBe(true);
});
it('should return false when comment does NOT begin with a quick action', () => {
const sampleComment = 'Hey, /unassign Merging this';
const hasQuickActions = notes.hasQuickActions(sampleComment);
- expect(hasQuickActions).toBeFalsy();
+ expect(hasQuickActions).toBe(false);
});
it('should return false when comment does NOT have any quick actions', () => {
const sampleComment = 'Looking good, Awesome!';
const hasQuickActions = notes.hasQuickActions(sampleComment);
- expect(hasQuickActions).toBeFalsy();
+ expect(hasQuickActions).toBe(false);
});
});
@@ -887,14 +885,14 @@ describe.skip('Old Notes (~/deprecated_notes.js)', () => {
expect($tempNote.prop('nodeName')).toEqual('LI');
expect($tempNote.attr('id')).toEqual(uniqueId);
- expect($tempNote.hasClass('being-posted')).toBeTruthy();
- expect($tempNote.hasClass('fade-in-half')).toBeTruthy();
+ expect($tempNote.hasClass('being-posted')).toBe(true);
+ expect($tempNote.hasClass('fade-in-half')).toBe(true);
$tempNote.find('.timeline-icon > a, .note-header-info > a').each((i, el) => {
expect(el.getAttribute('href')).toEqual(`/${currentUsername}`);
});
expect($tempNote.find('.timeline-icon .avatar').attr('src')).toEqual(currentUserAvatar);
- expect($tempNote.find('.timeline-content').hasClass('discussion')).toBeFalsy();
+ expect($tempNote.find('.timeline-content').hasClass('discussion')).toBe(false);
expect($tempNoteHeader.find('.d-none.d-sm-inline-block').text().trim()).toEqual(
currentUserFullname,
);
@@ -916,7 +914,7 @@ describe.skip('Old Notes (~/deprecated_notes.js)', () => {
});
expect($tempNote.prop('nodeName')).toEqual('LI');
- expect($tempNote.find('.timeline-content').hasClass('discussion')).toBeTruthy();
+ expect($tempNote.find('.timeline-content').hasClass('discussion')).toBe(true);
});
it('should return an escaped user name', () => {
@@ -954,8 +952,8 @@ describe.skip('Old Notes (~/deprecated_notes.js)', () => {
expect($tempNote.prop('nodeName')).toEqual('LI');
expect($tempNote.attr('id')).toEqual(uniqueId);
- expect($tempNote.hasClass('being-posted')).toBeTruthy();
- expect($tempNote.hasClass('fade-in-half')).toBeTruthy();
+ expect($tempNote.hasClass('being-posted')).toBe(true);
+ expect($tempNote.hasClass('fade-in-half')).toBe(true);
expect($tempNote.find('.timeline-content i').text().trim()).toEqual(sampleCommandDescription);
});
});
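The toBeTruthy/toBeFalsy replacements in this file (and the emitted-event assertions elsewhere in the diff) swap loose truthiness checks for exact ones: toBeTruthy passes for any truthy value, so a regression from true to, say, a non-empty string or array would still go green. A small sketch of the stricter style, using assertions that appear in this diff:

// Loose: passes for true, 'yes', 1, [], {} ...
expect(notes.hasQuickActions(sampleComment)).toBeTruthy();

// Strict: pins the exact boolean and the exact number of emitted events.
expect(notes.hasQuickActions(sampleComment)).toBe(true);
expect(wrapper.emitted('packageToDelete')).toHaveLength(1);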
diff --git a/spec/frontend/operation_settings/components/metrics_settings_spec.js b/spec/frontend/operation_settings/components/metrics_settings_spec.js
index c1fa1d24a82..21145466016 100644
--- a/spec/frontend/operation_settings/components/metrics_settings_spec.js
+++ b/spec/frontend/operation_settings/components/metrics_settings_spec.js
@@ -105,7 +105,10 @@ describe('operation settings external dashboard component', () => {
it('uses description text', () => {
const description = formGroup.find('small');
- expect(description.text()).not.toBeFalsy();
+ const expectedDescription =
+ "Choose whether to display dashboard metrics in UTC or the user's local timezone.";
+
+ expect(description.text()).toBe(expectedDescription);
});
});
@@ -138,7 +141,10 @@ describe('operation settings external dashboard component', () => {
it('uses description text', () => {
const description = formGroup.find('small');
- expect(description.text()).not.toBeFalsy();
+ const expectedDescription =
+ 'Add a button to the metrics dashboard linking directly to your existing external dashboard.';
+
+ expect(description.text()).toBe(expectedDescription);
});
});
@@ -151,7 +157,6 @@ describe('operation settings external dashboard component', () => {
});
it('defaults to externalDashboardUrl', () => {
- expect(input.attributes().value).toBeTruthy();
expect(input.attributes().value).toBe(externalDashboardUrl);
});
diff --git a/spec/frontend/packages_and_registries/container_registry/explorer/components/details_page/tags_list_spec.js b/spec/frontend/packages_and_registries/container_registry/explorer/components/details_page/tags_list_spec.js
index ef6c4a1fa32..b163557618e 100644
--- a/spec/frontend/packages_and_registries/container_registry/explorer/components/details_page/tags_list_spec.js
+++ b/spec/frontend/packages_and_registries/container_registry/explorer/components/details_page/tags_list_spec.js
@@ -4,7 +4,6 @@ import { GlEmptyState } from '@gitlab/ui';
import VueApollo from 'vue-apollo';
import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
-import { stripTypenames } from 'helpers/graphql_helpers';
import component from '~/packages_and_registries/container_registry/explorer/components/details_page/tags_list.vue';
import TagsListRow from '~/packages_and_registries/container_registry/explorer/components/details_page/tags_list_row.vue';
@@ -96,8 +95,8 @@ describe('Tags List', () => {
it('binds the correct props', () => {
expect(findRegistryList().props()).toMatchObject({
title: '2 tags',
- pagination: stripTypenames(tagsPageInfo),
- items: stripTypenames(tags),
+ pagination: tagsPageInfo,
+ items: tags,
idProperty: 'name',
});
});
diff --git a/spec/frontend/packages_and_registries/container_registry/explorer/components/list_page/cleanup_status_spec.js b/spec/frontend/packages_and_registries/container_registry/explorer/components/list_page/cleanup_status_spec.js
index a5b2b1d7cf8..61503d0f3bf 100644
--- a/spec/frontend/packages_and_registries/container_registry/explorer/components/list_page/cleanup_status_spec.js
+++ b/spec/frontend/packages_and_registries/container_registry/explorer/components/list_page/cleanup_status_spec.js
@@ -90,18 +90,26 @@ describe('cleanup_status', () => {
`(
'when the status is $status, it is $visible that the extra icon is visible',
({ status, visible }) => {
- mountComponent({ status });
+ mountComponent({ status, expirationPolicy: { next_run_at: '2063-04-08T01:44:03Z' } });
expect(findExtraInfoIcon().exists()).toBe(visible);
},
);
+ it(`when the status is ${UNFINISHED_STATUS} & expirationPolicy does not exist the extra icon is not visible`, () => {
+ mountComponent({
+ status: UNFINISHED_STATUS,
+ });
+
+ expect(findExtraInfoIcon().exists()).toBe(false);
+ });
+
it(`has a popover with a learn more link and a time frame for the next run`, () => {
jest.spyOn(Date, 'now').mockImplementation(() => new Date('2063-04-04T00:42:00Z').getTime());
mountComponent({
status: UNFINISHED_STATUS,
- expirationPolicy: { next_run: '2063-04-08T01:44:03Z' },
+ expirationPolicy: { next_run_at: '2063-04-08T01:44:03Z' },
});
expect(findPopover().exists()).toBe(true);
@@ -113,7 +121,7 @@ describe('cleanup_status', () => {
it('id matches popover target attribute', () => {
mountComponent({
status: UNFINISHED_STATUS,
- next_run_at: '2063-04-08T01:44:03Z',
+ expirationPolicy: { next_run_at: '2063-04-08T01:44:03Z' },
});
const id = findExtraInfoIcon().attributes('id');
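The popover spec above freezes Date.now before mounting so the time frame derived from expirationPolicy.next_run_at renders deterministically. The pattern in isolation, with the same dates the spec uses:

// Pin "now" so relative-time output (e.g. "in 4 days") is stable across runs.
jest
  .spyOn(Date, 'now')
  .mockImplementation(() => new Date('2063-04-04T00:42:00Z').getTime());

mountComponent({
  status: UNFINISHED_STATUS,
  expirationPolicy: { next_run_at: '2063-04-08T01:44:03Z' },
});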
diff --git a/spec/frontend/packages_and_registries/container_registry/explorer/mock_data.js b/spec/frontend/packages_and_registries/container_registry/explorer/mock_data.js
index f9739509ef9..b11048cd7a2 100644
--- a/spec/frontend/packages_and_registries/container_registry/explorer/mock_data.js
+++ b/spec/frontend/packages_and_registries/container_registry/explorer/mock_data.js
@@ -13,7 +13,7 @@ export const imagesListResponse = [
expirationPolicyCleanupStatus: 'UNSCHEDULED',
project: {
id: 'gid://gitlab/Project/22',
- path: 'gitlab-test',
+ path: 'GITLAB-TEST',
},
},
{
diff --git a/spec/frontend/packages_and_registries/dependency_proxy/app_spec.js b/spec/frontend/packages_and_registries/dependency_proxy/app_spec.js
index f2901148e17..fb50d623543 100644
--- a/spec/frontend/packages_and_registries/dependency_proxy/app_spec.js
+++ b/spec/frontend/packages_and_registries/dependency_proxy/app_spec.js
@@ -50,6 +50,7 @@ describe('DependencyProxyApp', () => {
groupPath: 'gitlab-org',
groupId: dummyGrouptId,
noManifestsIllustration: 'noManifestsIllustration',
+ canClearCache: true,
};
function createComponent({ provide = provideDefaults } = {}) {
@@ -268,6 +269,23 @@ describe('DependencyProxyApp', () => {
'All items in the cache are scheduled for removal.',
);
});
+
+ describe('when user has no permission to clear cache', () => {
+ beforeEach(() => {
+ createComponent({
+ provide: {
+ groupPath: 'gitlab-org',
+ groupId: dummyGrouptId,
+ noManifestsIllustration: 'noManifestsIllustration',
+ canClearCache: false,
+ },
+ });
+ });
+
+ it('does not show the clear cache dropdown list', () => {
+ expect(findClearCacheDropdownList().exists()).toBe(false);
+ });
+ });
});
});
});
diff --git a/spec/frontend/packages_and_registries/infrastructure_registry/components/shared/package_list_row_spec.js b/spec/frontend/packages_and_registries/infrastructure_registry/components/shared/package_list_row_spec.js
index 79c1b18c9f9..721bdd34a4f 100644
--- a/spec/frontend/packages_and_registries/infrastructure_registry/components/shared/package_list_row_spec.js
+++ b/spec/frontend/packages_and_registries/infrastructure_registry/components/shared/package_list_row_spec.js
@@ -128,7 +128,7 @@ describe('packages_list_row', () => {
findDeleteButton().vm.$emit('click');
await nextTick();
- expect(wrapper.emitted('packageToDelete')).toBeTruthy();
+ expect(wrapper.emitted('packageToDelete')).toHaveLength(1);
expect(wrapper.emitted('packageToDelete')[0]).toEqual([packageWithoutTags]);
});
});
diff --git a/spec/frontend/packages_and_registries/package_registry/components/details/__snapshots__/package_title_spec.js.snap b/spec/frontend/packages_and_registries/package_registry/components/details/__snapshots__/package_title_spec.js.snap
index fdddc131412..61923233d2e 100644
--- a/spec/frontend/packages_and_registries/package_registry/components/details/__snapshots__/package_title_spec.js.snap
+++ b/spec/frontend/packages_and_registries/package_registry/components/details/__snapshots__/package_title_spec.js.snap
@@ -29,19 +29,25 @@ exports[`PackageTitle renders with tags 1`] = `
<div
class="gl-display-flex gl-align-items-center gl-text-gray-500 gl-mt-3"
>
- <span
+ <div
+ class="gl-display-flex gl-gap-3"
data-testid="sub-header"
>
v
1.0.0
published
<time-ago-tooltip-stub
- class="gl-ml-2"
cssclass=""
time="2020-08-17T14:23:32Z"
tooltipplacement="top"
/>
- </span>
+
+ <package-tags-stub
+ hidelabel="true"
+ tagdisplaylimit="2"
+ tags="[object Object],[object Object],[object Object]"
+ />
+ </div>
</div>
</div>
</div>
@@ -73,15 +79,6 @@ exports[`PackageTitle renders with tags 1`] = `
texttooltip=""
/>
</div>
- <div
- class="gl-display-flex gl-align-items-center gl-mr-5"
- >
- <package-tags-stub
- hidelabel="true"
- tagdisplaylimit="2"
- tags="[object Object],[object Object],[object Object]"
- />
- </div>
</div>
</div>
@@ -121,19 +118,21 @@ exports[`PackageTitle renders without tags 1`] = `
<div
class="gl-display-flex gl-align-items-center gl-text-gray-500 gl-mt-3"
>
- <span
+ <div
+ class="gl-display-flex gl-gap-3"
data-testid="sub-header"
>
v
1.0.0
published
<time-ago-tooltip-stub
- class="gl-ml-2"
cssclass=""
time="2020-08-17T14:23:32Z"
tooltipplacement="top"
/>
- </span>
+
+ <!---->
+ </div>
</div>
</div>
</div>
diff --git a/spec/frontend/packages_and_registries/package_registry/components/details/__snapshots__/pypi_installation_spec.js.snap b/spec/frontend/packages_and_registries/package_registry/components/details/__snapshots__/pypi_installation_spec.js.snap
index 06ae8645101..92c2cd90568 100644
--- a/spec/frontend/packages_and_registries/package_registry/components/details/__snapshots__/pypi_installation_spec.js.snap
+++ b/spec/frontend/packages_and_registries/package_registry/components/details/__snapshots__/pypi_installation_spec.js.snap
@@ -2,19 +2,161 @@
exports[`PypiInstallation renders all the messages 1`] = `
<div>
- <installation-title-stub
- options="[object Object]"
- packagetype="pypi"
- />
+ <div
+ class="gl-display-flex gl-justify-content-space-between gl-align-items-center"
+ >
+ <h3
+ class="gl-font-lg"
+ >
+ Installation
+ </h3>
+
+ <div>
+ <div
+ class="dropdown b-dropdown gl-new-dropdown btn-group"
+ id="__BVID__27"
+ lazy=""
+ >
+ <!---->
+ <button
+ aria-expanded="false"
+ aria-haspopup="true"
+ class="btn dropdown-toggle btn-default btn-md gl-button gl-dropdown-toggle"
+ id="__BVID__27__BV_toggle_"
+ type="button"
+ >
+ <!---->
+
+ <!---->
+
+ <span
+ class="gl-new-dropdown-button-text"
+ >
+ Show PyPi commands
+ </span>
+
+ <svg
+ aria-hidden="true"
+ class="gl-button-icon dropdown-chevron gl-icon s16"
+ data-testid="chevron-down-icon"
+ role="img"
+ >
+ <use
+ href="#chevron-down"
+ />
+ </svg>
+ </button>
+ <ul
+ aria-labelledby="__BVID__27__BV_toggle_"
+ class="dropdown-menu"
+ role="menu"
+ tabindex="-1"
+ >
+ <!---->
+ </ul>
+ </div>
+ </div>
+ </div>
- <code-instruction-stub
- copytext="Copy Pip command"
- data-testid="pip-command"
- instruction="pip install @gitlab-org/package-15 --extra-index-url http://__token__:<your_personal_token>@gdk.test:3000/api/v4/projects/1/packages/pypi/simple"
- label="Pip Command"
- trackingaction="copy_pip_install_command"
- trackinglabel="code_instruction"
- />
+ <fieldset
+ aria-describedby="installation-pip-command-group__BV_description_"
+ class="form-group gl-form-group"
+ id="installation-pip-command-group"
+ >
+ <legend
+ class="bv-no-focus-ring col-form-label pt-0 col-form-label"
+ id="installation-pip-command-group__BV_label_"
+ tabindex="-1"
+ >
+
+
+
+ <!---->
+
+ <!---->
+ </legend>
+ <div
+ aria-labelledby="installation-pip-command-group__BV_label_"
+ class="bv-no-focus-ring"
+ role="group"
+ tabindex="-1"
+ >
+ <div
+ data-testid="pip-command"
+ id="installation-pip-command"
+ >
+ <label
+ for="instruction-input_5"
+ >
+ Pip Command
+ </label>
+
+ <div
+ class="gl-mb-3"
+ >
+ <div
+ class="input-group gl-mb-3"
+ >
+ <input
+ class="form-control gl-font-monospace"
+ data-testid="instruction-input"
+ id="instruction-input_5"
+ readonly="readonly"
+ type="text"
+ />
+
+ <span
+ class="input-group-append"
+ data-testid="instruction-button"
+ >
+ <button
+ aria-label="Copy Pip command"
+ aria-live="polite"
+ class="btn input-group-text btn-default btn-md gl-button btn-default-secondary btn-icon"
+ data-clipboard-handle-tooltip="false"
+ data-clipboard-text="pip install @gitlab-org/package-15 --extra-index-url http://__token__:<your_personal_token>@gdk.test:3000/api/v4/projects/1/packages/pypi/simple"
+ id="clipboard-button-6"
+ title="Copy Pip command"
+ type="button"
+ >
+ <!---->
+
+ <svg
+ aria-hidden="true"
+ class="gl-button-icon gl-icon s16"
+ data-testid="copy-to-clipboard-icon"
+ role="img"
+ >
+ <use
+ href="#copy-to-clipboard"
+ />
+ </svg>
+
+ <!---->
+ </button>
+ </span>
+ </div>
+ </div>
+ </div>
+ <!---->
+ <!---->
+ <small
+ class="form-text text-muted"
+ id="installation-pip-command-group__BV_description_"
+ tabindex="-1"
+ >
+ You will need a
+ <a
+ class="gl-link"
+ data-testid="access-token-link"
+ href="/help/user/profile/personal_access_tokens"
+ >
+ personal access token
+ </a>
+ .
+ </small>
+ </div>
+ </fieldset>
<h3
class="gl-font-lg"
@@ -30,25 +172,33 @@ exports[`PypiInstallation renders all the messages 1`] = `
file.
</p>
- <code-instruction-stub
- copytext="Copy .pypirc content"
+ <div
data-testid="pypi-setup-content"
- instruction="[gitlab]
+ >
+ <!---->
+
+ <div>
+ <pre
+ class="gl-font-monospace"
+ data-testid="multiline-instruction"
+ >
+ [gitlab]
repository = http://gdk.test:3000/api/v4/projects/1/packages/pypi
username = __token__
-password = <your personal access token>"
- label=""
- multiline="true"
- trackingaction="copy_pypi_setup_command"
- trackinglabel="code_instruction"
- />
+password = &lt;your personal access token&gt;
+ </pre>
+ </div>
+ </div>
For more information on the PyPi registry,
- <gl-link-stub
+ <a
+ class="gl-link"
+ data-testid="pypi-docs-link"
href="/help/user/packages/pypi_repository/index"
+ rel="noopener"
target="_blank"
>
see the documentation
- </gl-link-stub>
+ </a>
.
</div>
`;
diff --git a/spec/frontend/packages_and_registries/package_registry/components/details/package_files_spec.js b/spec/frontend/packages_and_registries/package_registry/components/details/package_files_spec.js
index 0447ead0830..529a6a22ddf 100644
--- a/spec/frontend/packages_and_registries/package_registry/components/details/package_files_spec.js
+++ b/spec/frontend/packages_and_registries/package_registry/components/details/package_files_spec.js
@@ -1,6 +1,6 @@
-import { GlDropdown, GlButton } from '@gitlab/ui';
+import { GlDropdown, GlButton, GlFormCheckbox } from '@gitlab/ui';
import { nextTick } from 'vue';
-import stubChildren from 'helpers/stub_children';
+import { stubComponent } from 'helpers/stub_component';
import { mountExtended, extendedWrapper } from 'helpers/vue_test_utils_helper';
import { packageFiles as packageFilesMock } from 'jest/packages_and_registries/package_registry/mock_data';
import PackageFiles from '~/packages_and_registries/package_registry/components/details/package_files.vue';
@@ -11,6 +11,7 @@ describe('Package Files', () => {
let wrapper;
const findAllRows = () => wrapper.findAllByTestId('file-row');
+ const findDeleteSelectedButton = () => wrapper.findByTestId('delete-selected');
const findFirstRow = () => extendedWrapper(findAllRows().at(0));
const findSecondRow = () => extendedWrapper(findAllRows().at(1));
const findFirstRowDownloadLink = () => findFirstRow().findByTestId('download-link');
@@ -22,19 +23,27 @@ describe('Package Files', () => {
const findActionMenuDelete = () => findFirstActionMenu().findByTestId('delete-file');
const findFirstToggleDetailsButton = () => findFirstRow().findComponent(GlButton);
const findFirstRowShaComponent = (id) => wrapper.findByTestId(id);
+ const findCheckAllCheckbox = () => wrapper.findByTestId('package-files-checkbox-all');
+ const findAllRowCheckboxes = () => wrapper.findAllByTestId('package-files-checkbox');
const files = packageFilesMock();
const [file] = files;
- const createComponent = ({ packageFiles = [file], canDelete = true } = {}) => {
+ const createComponent = ({
+ packageFiles = [file],
+ isLoading = false,
+ canDelete = true,
+ stubs,
+ } = {}) => {
wrapper = mountExtended(PackageFiles, {
propsData: {
canDelete,
+ isLoading,
packageFiles,
},
stubs: {
- ...stubChildren(PackageFiles),
- GlTableLite: false,
+ GlTable: false,
+ ...stubs,
},
});
};
@@ -157,43 +166,170 @@ describe('Package Files', () => {
expect(findSecondRowCommitLink().exists()).toBe(false);
});
});
+ });
- describe('action menu', () => {
- describe('when the user can delete', () => {
- it('exists', () => {
- createComponent();
+ describe('action menu', () => {
+ describe('when the user can delete', () => {
+ it('exists', () => {
+ createComponent();
- expect(findFirstActionMenu().exists()).toBe(true);
- });
+ expect(findFirstActionMenu().exists()).toBe(true);
+ expect(findFirstActionMenu().props('icon')).toBe('ellipsis_v');
+ expect(findFirstActionMenu().props('textSrOnly')).toBe(true);
+ expect(findFirstActionMenu().props('text')).toMatchInterpolatedText('More actions');
+ });
- describe('menu items', () => {
- describe('delete file', () => {
- it('exists', () => {
- createComponent();
+ describe('menu items', () => {
+ describe('delete file', () => {
+ it('exists', () => {
+ createComponent();
- expect(findActionMenuDelete().exists()).toBe(true);
- });
+ expect(findActionMenuDelete().exists()).toBe(true);
+ });
- it('emits a delete event when clicked', () => {
- createComponent();
+ it('emits a delete event when clicked', async () => {
+ createComponent();
- findActionMenuDelete().vm.$emit('click');
+ await findActionMenuDelete().trigger('click');
- const [[{ id }]] = wrapper.emitted('delete-file');
- expect(id).toBe(file.id);
- });
+ const [[items]] = wrapper.emitted('delete-files');
+ const [{ id }] = items;
+ expect(id).toBe(file.id);
});
});
});
+ });
+
+ describe('when the user can not delete', () => {
+ const canDelete = false;
+
+ it('does not exist', () => {
+ createComponent({ canDelete });
+
+ expect(findFirstActionMenu().exists()).toBe(false);
+ });
+ });
+ });
+
+ describe('multi select', () => {
+ describe('when user can delete', () => {
+ it('delete selected button exists & is disabled', () => {
+ createComponent();
+
+ expect(findDeleteSelectedButton().exists()).toBe(true);
+ expect(findDeleteSelectedButton().text()).toMatchInterpolatedText('Delete selected');
+ expect(findDeleteSelectedButton().props('disabled')).toBe(true);
+ });
+
+ it('delete selected button exists & is disabled when isLoading prop is true', () => {
+ createComponent({ isLoading: true });
+
+ expect(findDeleteSelectedButton().props('disabled')).toBe(true);
+ });
+
+ it('checkboxes to select file are visible', () => {
+ createComponent({ packageFiles: files });
+
+ expect(findCheckAllCheckbox().exists()).toBe(true);
+ expect(findAllRowCheckboxes()).toHaveLength(2);
+ });
+
+ it('selecting a checkbox enables delete selected button', async () => {
+ createComponent();
+
+ const first = findAllRowCheckboxes().at(0);
+
+ await first.setChecked(true);
+
+ expect(findDeleteSelectedButton().props('disabled')).toBe(false);
+ });
+
+ describe('select all checkbox', () => {
+ it('will toggle between selecting all and deselecting all files', async () => {
+ const getChecked = () => findAllRowCheckboxes().filter((x) => x.element.checked === true);
+
+ createComponent({ packageFiles: files });
+
+ expect(getChecked()).toHaveLength(0);
+
+ await findCheckAllCheckbox().setChecked(true);
- describe('when the user can not delete', () => {
- const canDelete = false;
+ expect(getChecked()).toHaveLength(files.length);
- it('does not exist', () => {
- createComponent({ canDelete });
+ await findCheckAllCheckbox().setChecked(false);
- expect(findFirstActionMenu().exists()).toBe(false);
+ expect(getChecked()).toHaveLength(0);
});
+
+ it('will toggle the indeterminate state when some but not all files are selected', async () => {
+ const expectIndeterminateState = (state) =>
+ expect(findCheckAllCheckbox().props('indeterminate')).toBe(state);
+
+ createComponent({
+ packageFiles: files,
+ stubs: { GlFormCheckbox: stubComponent(GlFormCheckbox, { props: ['indeterminate'] }) },
+ });
+
+ expectIndeterminateState(false);
+
+ await findSecondRow().trigger('click');
+
+ expectIndeterminateState(true);
+
+ await findSecondRow().trigger('click');
+
+ expectIndeterminateState(false);
+
+ findCheckAllCheckbox().trigger('click');
+
+ expectIndeterminateState(false);
+
+ await findSecondRow().trigger('click');
+
+ expectIndeterminateState(true);
+ });
+ });
+
+ it('emits a delete event when selected', async () => {
+ createComponent();
+
+ const first = findAllRowCheckboxes().at(0);
+
+ await first.setChecked(true);
+
+ await findDeleteSelectedButton().trigger('click');
+
+ const [[items]] = wrapper.emitted('delete-files');
+ const [{ id }] = items;
+ expect(id).toBe(file.id);
+ });
+
+ it('emits delete event with both items when all are selected', async () => {
+ createComponent({ packageFiles: files });
+
+ await findCheckAllCheckbox().setChecked(true);
+
+ await findDeleteSelectedButton().trigger('click');
+
+ const [[items]] = wrapper.emitted('delete-files');
+ expect(items).toHaveLength(2);
+ });
+ });
+
+ describe('when user cannot delete', () => {
+ const canDelete = false;
+
+ it('delete selected button does not exist', () => {
+ createComponent({ canDelete });
+
+ expect(findDeleteSelectedButton().exists()).toBe(false);
+ });
+
+ it('checkboxes to select file are not visible', () => {
+ createComponent({ packageFiles: files, canDelete });
+
+ expect(findCheckAllCheckbox().exists()).toBe(false);
+ expect(findAllRowCheckboxes()).toHaveLength(0);
});
});
});
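The new multi-select block drives real checkbox inputs with setChecked in a mounted component rather than stubbing them out. A condensed sketch of the interaction being verified, reusing the finder helpers defined in the spec above:

createComponent({ packageFiles: files });

// Checking a row enables the bulk-delete button...
await findAllRowCheckboxes().at(0).setChecked(true);
expect(findDeleteSelectedButton().props('disabled')).toBe(false);

// ...and clicking it emits delete-files with the selected file objects.
await findDeleteSelectedButton().trigger('click');
expect(wrapper.emitted('delete-files')[0][0]).toHaveLength(1);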
diff --git a/spec/frontend/packages_and_registries/package_registry/components/details/package_history_spec.js b/spec/frontend/packages_and_registries/package_registry/components/details/package_history_spec.js
index f4e6d43812d..ec2e833552a 100644
--- a/spec/frontend/packages_and_registries/package_registry/components/details/package_history_spec.js
+++ b/spec/frontend/packages_and_registries/package_registry/components/details/package_history_spec.js
@@ -17,6 +17,12 @@ import HistoryItem from '~/vue_shared/components/registry/history_item.vue';
import TimeAgoTooltip from '~/vue_shared/components/time_ago_tooltip.vue';
import waitForPromises from 'helpers/wait_for_promises';
import getPackagePipelines from '~/packages_and_registries/package_registry/graphql/queries/get_package_pipelines.query.graphql';
+import Tracking from '~/tracking';
+import {
+ TRACKING_ACTION_CLICK_PIPELINE_LINK,
+ TRACKING_ACTION_CLICK_COMMIT_LINK,
+ TRACKING_LABEL_PACKAGE_HISTORY,
+} from '~/packages_and_registries/package_registry/constants';
Vue.use(VueApollo);
@@ -181,7 +187,6 @@ describe('Package History', () => {
it('link', () => {
const linkElement = findElementLink(element);
const exist = Boolean(link);
-
expect(linkElement.exists()).toBe(exist);
if (exist) {
expect(linkElement.attributes('href')).toBe(link);
@@ -189,4 +194,29 @@ describe('Package History', () => {
});
},
);
+ describe('tracking', () => {
+ let eventSpy;
+ const category = 'UI::Packages';
+
+ beforeEach(() => {
+ mountComponent();
+ eventSpy = jest.spyOn(Tracking, 'event');
+ });
+
+ it('clicking pipeline link tracks the right action', () => {
+ wrapper.vm.trackPipelineClick();
+ expect(eventSpy).toHaveBeenCalledWith(category, TRACKING_ACTION_CLICK_PIPELINE_LINK, {
+ category,
+ label: TRACKING_LABEL_PACKAGE_HISTORY,
+ });
+ });
+
+ it('clicking commit link tracks the right action', () => {
+ wrapper.vm.trackCommitClick();
+ expect(eventSpy).toHaveBeenCalledWith(category, TRACKING_ACTION_CLICK_COMMIT_LINK, {
+ category,
+ label: TRACKING_LABEL_PACKAGE_HISTORY,
+ });
+ });
+ });
});
diff --git a/spec/frontend/packages_and_registries/package_registry/components/details/package_title_spec.js b/spec/frontend/packages_and_registries/package_registry/components/details/package_title_spec.js
index d306f7834f0..37416dcd4e7 100644
--- a/spec/frontend/packages_and_registries/package_registry/components/details/package_title_spec.js
+++ b/spec/frontend/packages_and_registries/package_registry/components/details/package_title_spec.js
@@ -22,16 +22,21 @@ const packageWithTags = {
packageFiles: { nodes: packageFiles() },
};
+const defaultProvide = {
+ isGroupPage: false,
+};
+
describe('PackageTitle', () => {
let wrapper;
- async function createComponent(packageEntity = packageWithTags) {
+ async function createComponent(packageEntity = packageWithTags, provide = defaultProvide) {
wrapper = shallowMountExtended(PackageTitle, {
propsData: { packageEntity },
stubs: {
TitleArea,
GlSprintf,
},
+ provide,
directives: {
GlResizeObserver: createMockDirective(),
},
@@ -199,11 +204,22 @@ describe('PackageTitle', () => {
expect(findPipelineProject().exists()).toBe(false);
});
- it('correctly shows the pipeline project if there is one', async () => {
+ it('does not display the pipeline project on project page even if it exists', async () => {
await createComponent({
...packageData(),
pipelines: { nodes: packagePipelines() },
});
+ expect(findPipelineProject().exists()).toBe(false);
+ });
+
+ it('correctly shows the pipeline project on group page if there is one', async () => {
+ await createComponent(
+ {
+ ...packageData(),
+ pipelines: { nodes: packagePipelines() },
+ },
+ { isGroupPage: true },
+ );
expect(findPipelineProject().props()).toMatchObject({
text: packagePipelines()[0].project.name,
icon: 'review-list',
diff --git a/spec/frontend/packages_and_registries/package_registry/components/details/pypi_installation_spec.js b/spec/frontend/packages_and_registries/package_registry/components/details/pypi_installation_spec.js
index f2fef6436a6..20acb0872e5 100644
--- a/spec/frontend/packages_and_registries/package_registry/components/details/pypi_installation_spec.js
+++ b/spec/frontend/packages_and_registries/package_registry/components/details/pypi_installation_spec.js
@@ -1,9 +1,10 @@
-import { GlLink, GlSprintf } from '@gitlab/ui';
-import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import { GlSprintf } from '@gitlab/ui';
+import { mountExtended } from 'helpers/vue_test_utils_helper';
import { packageData } from 'jest/packages_and_registries/package_registry/mock_data';
import InstallationTitle from '~/packages_and_registries/package_registry/components/details/installation_title.vue';
import PypiInstallation from '~/packages_and_registries/package_registry/components/details/pypi_installation.vue';
import {
+ PERSONAL_ACCESS_TOKEN_HELP_URL,
PACKAGE_TYPE_PYPI,
TRACKING_ACTION_COPY_PIP_INSTALL_COMMAND,
TRACKING_ACTION_COPY_PYPI_SETUP_COMMAND,
@@ -24,11 +25,12 @@ password = <your personal access token>`;
const pipCommand = () => wrapper.findByTestId('pip-command');
const setupInstruction = () => wrapper.findByTestId('pypi-setup-content');
+ const findAccessTokenLink = () => wrapper.findByTestId('access-token-link');
const findInstallationTitle = () => wrapper.findComponent(InstallationTitle);
- const findSetupDocsLink = () => wrapper.findComponent(GlLink);
+ const findSetupDocsLink = () => wrapper.findByTestId('pypi-docs-link');
function createComponent() {
- wrapper = shallowMountExtended(PypiInstallation, {
+ wrapper = mountExtended(PypiInstallation, {
propsData: {
packageEntity,
},
@@ -78,6 +80,12 @@ password = <your personal access token>`;
});
});
+ it('has a link to personal access token docs', () => {
+ expect(findAccessTokenLink().attributes()).toMatchObject({
+ href: PERSONAL_ACCESS_TOKEN_HELP_URL,
+ });
+ });
+
it('has a link to the docs', () => {
expect(findSetupDocsLink().attributes()).toMatchObject({
href: PYPI_HELP_PATH,
diff --git a/spec/frontend/packages_and_registries/package_registry/components/list/package_list_row_spec.js b/spec/frontend/packages_and_registries/package_registry/components/list/package_list_row_spec.js
index c16c09b5326..eb1e76377ff 100644
--- a/spec/frontend/packages_and_registries/package_registry/components/list/package_list_row_spec.js
+++ b/spec/frontend/packages_and_registries/package_registry/components/list/package_list_row_spec.js
@@ -123,7 +123,7 @@ describe('packages_list_row', () => {
findDeleteDropdown().vm.$emit('click');
await nextTick();
- expect(wrapper.emitted('packageToDelete')).toBeTruthy();
+ expect(wrapper.emitted('packageToDelete')).toHaveLength(1);
expect(wrapper.emitted('packageToDelete')[0]).toEqual([packageWithoutTags]);
});
});
diff --git a/spec/frontend/packages_and_registries/package_registry/mock_data.js b/spec/frontend/packages_and_registries/package_registry/mock_data.js
index d40feee582f..22236424e6a 100644
--- a/spec/frontend/packages_and_registries/package_registry/mock_data.js
+++ b/spec/frontend/packages_and_registries/package_registry/mock_data.js
@@ -141,6 +141,7 @@ export const packageData = (extend) => ({
});
export const conanMetadata = () => ({
+ __typename: 'ConanMetadata',
id: 'conan-1',
packageChannel: 'stable',
packageUsername: 'gitlab-org+gitlab-test',
@@ -148,9 +149,8 @@ export const conanMetadata = () => ({
recipePath: 'package-8/1.0.0/gitlab-org+gitlab-test/stable',
});
-const conanMetadataQuery = () => ({ ...conanMetadata(), __typename: 'ConanMetadata' });
-
export const composerMetadata = () => ({
+ __typename: 'ComposerMetadata',
targetSha: 'b83d6e391c22777fca1ed3012fce84f633d7fed0',
composerJson: {
license: 'MIT',
@@ -158,19 +158,14 @@ export const composerMetadata = () => ({
},
});
-const composerMetadataQuery = () => ({
- ...composerMetadata(),
- __typename: 'ComposerMetadata',
-});
-
export const pypiMetadata = () => ({
+ __typename: 'PypiMetadata',
id: 'pypi-1',
requiredPython: '1.0.0',
});
-const pypiMetadataQuery = () => ({ ...pypiMetadata(), __typename: 'PypiMetadata' });
-
export const mavenMetadata = () => ({
+ __typename: 'MavenMetadata',
id: 'maven-1',
appName: 'appName',
appGroup: 'appGroup',
@@ -178,23 +173,20 @@ export const mavenMetadata = () => ({
path: 'path',
});
-const mavenMetadataQuery = () => ({ ...mavenMetadata(), __typename: 'MavenMetadata' });
-
export const nugetMetadata = () => ({
+ __typename: 'NugetMetadata',
id: 'nuget-1',
iconUrl: 'iconUrl',
licenseUrl: 'licenseUrl',
projectUrl: 'projectUrl',
});
-const nugetMetadataQuery = () => ({ ...nugetMetadata(), __typename: 'NugetMetadata' });
-
const packageTypeMetadataQueryMapping = {
- CONAN: conanMetadataQuery,
- COMPOSER: composerMetadataQuery,
- PYPI: pypiMetadataQuery,
- MAVEN: mavenMetadataQuery,
- NUGET: nugetMetadataQuery,
+ CONAN: conanMetadata,
+ COMPOSER: composerMetadata,
+ PYPI: pypiMetadata,
+ MAVEN: mavenMetadata,
+ NUGET: nugetMetadata,
};
export const pagination = (extend) => ({
@@ -221,6 +213,7 @@ export const packageDetailsQuery = (extendPackage) => ({
id: '1',
path: 'projectPath',
name: 'gitlab-test',
+ fullPath: 'gitlab-test',
},
tags: {
nodes: packageTags(),
@@ -231,6 +224,9 @@ export const packageDetailsQuery = (extendPackage) => ({
__typename: 'PipelineConnection',
},
packageFiles: {
+ pageInfo: {
+ hasNextPage: true,
+ },
nodes: packageFiles(),
__typename: 'PackageFileConnection',
},
@@ -310,16 +306,16 @@ export const packageDestroyMutationError = () => ({
],
});
-export const packageDestroyFileMutation = () => ({
+export const packageDestroyFilesMutation = () => ({
data: {
- destroyPackageFile: {
+ destroyPackageFiles: {
errors: [],
},
},
});
-export const packageDestroyFileMutationError = () => ({
+export const packageDestroyFilesMutationError = () => ({
data: {
- destroyPackageFile: null,
+ destroyPackageFiles: null,
},
errors: [
{
@@ -331,7 +327,7 @@ export const packageDestroyFileMutationError = () => ({
column: 3,
},
],
- path: ['destroyPackageFile'],
+ path: ['destroyPackageFiles'],
},
],
});
diff --git a/spec/frontend/packages_and_registries/package_registry/pages/details_spec.js b/spec/frontend/packages_and_registries/package_registry/pages/details_spec.js
index 3cadb001c58..de78e6bb87b 100644
--- a/spec/frontend/packages_and_registries/package_registry/pages/details_spec.js
+++ b/spec/frontend/packages_and_registries/package_registry/pages/details_spec.js
@@ -22,6 +22,8 @@ import {
PACKAGE_TYPE_COMPOSER,
DELETE_PACKAGE_FILE_SUCCESS_MESSAGE,
DELETE_PACKAGE_FILE_ERROR_MESSAGE,
+ DELETE_PACKAGE_FILES_SUCCESS_MESSAGE,
+ DELETE_PACKAGE_FILES_ERROR_MESSAGE,
PACKAGE_TYPE_NUGET,
PACKAGE_TYPE_MAVEN,
PACKAGE_TYPE_CONAN,
@@ -29,7 +31,7 @@ import {
PACKAGE_TYPE_NPM,
} from '~/packages_and_registries/package_registry/constants';
-import destroyPackageFileMutation from '~/packages_and_registries/package_registry/graphql/mutations/destroy_package_file.mutation.graphql';
+import destroyPackageFilesMutation from '~/packages_and_registries/package_registry/graphql/mutations/destroy_package_files.mutation.graphql';
import getPackageDetails from '~/packages_and_registries/package_registry/graphql/queries/get_package_details.query.graphql';
import {
packageDetailsQuery,
@@ -38,8 +40,8 @@ import {
dependencyLinks,
emptyPackageDetailsQuery,
packageFiles,
- packageDestroyFileMutation,
- packageDestroyFileMutationError,
+ packageDestroyFilesMutation,
+ packageDestroyFilesMutationError,
} from '../mock_data';
jest.mock('~/flash');
@@ -58,6 +60,7 @@ describe('PackagesApp', () => {
emptyListIllustration: 'svgPath',
projectListUrl: 'projectListUrl',
groupListUrl: 'groupListUrl',
+ isGroupPage: false,
breadCrumbState,
};
@@ -65,14 +68,14 @@ describe('PackagesApp', () => {
function createComponent({
resolver = jest.fn().mockResolvedValue(packageDetailsQuery()),
- fileDeleteMutationResolver = jest.fn().mockResolvedValue(packageDestroyFileMutation()),
+ filesDeleteMutationResolver = jest.fn().mockResolvedValue(packageDestroyFilesMutation()),
routeId = '1',
} = {}) {
Vue.use(VueApollo);
const requestHandlers = [
[getPackageDetails, resolver],
- [destroyPackageFileMutation, fileDeleteMutationResolver],
+ [destroyPackageFilesMutation, filesDeleteMutationResolver],
];
apolloProvider = createMockApollo(requestHandlers);
@@ -110,6 +113,7 @@ describe('PackagesApp', () => {
const findDeleteButton = () => wrapper.findByTestId('delete-package');
const findPackageFiles = () => wrapper.findComponent(PackageFiles);
const findDeleteFileModal = () => wrapper.findByTestId('delete-file-modal');
+ const findDeleteFilesModal = () => wrapper.findByTestId('delete-files-modal');
const findVersionRows = () => wrapper.findAllComponents(VersionRow);
const noVersionsMessage = () => wrapper.findByTestId('no-versions-message');
const findDependenciesCountBadge = () => wrapper.findComponent(GlBadge);
@@ -288,6 +292,7 @@ describe('PackagesApp', () => {
expect(findPackageFiles().props('packageFiles')[0]).toMatchObject(expectedFile);
expect(findPackageFiles().props('canDelete')).toBe(packageData().canDestroy);
+ expect(findPackageFiles().props('isLoading')).toEqual(false);
});
it('does not render the package files table when the package is composer', async () => {
@@ -305,24 +310,69 @@ describe('PackagesApp', () => {
describe('deleting a file', () => {
const [fileToDelete] = packageFiles();
- const doDeleteFile = () => {
- findPackageFiles().vm.$emit('delete-file', fileToDelete);
+ const doDeleteFile = async () => {
+ findPackageFiles().vm.$emit('delete-files', [fileToDelete]);
findDeleteFileModal().vm.$emit('primary');
return waitForPromises();
};
- it('opens a confirmation modal', async () => {
+ it('opens delete file confirmation modal', async () => {
createComponent();
await waitForPromises();
- findPackageFiles().vm.$emit('delete-file', fileToDelete);
+ const showDeleteFileSpy = jest.spyOn(wrapper.vm.$refs.deleteFileModal, 'show');
+ const showDeletePackageSpy = jest.spyOn(wrapper.vm.$refs.deleteModal, 'show');
+
+ findPackageFiles().vm.$emit('delete-files', [fileToDelete]);
+
+ expect(showDeletePackageSpy).not.toBeCalled();
+ expect(showDeleteFileSpy).toBeCalled();
+ });
+
+ it('opens delete package confirmation modal when it is the only file', async () => {
+ const [packageFile] = packageFiles();
+ const resolver = jest.fn().mockResolvedValue(
+ packageDetailsQuery({
+ packageFiles: {
+ pageInfo: {
+ hasNextPage: false,
+ },
+ nodes: [packageFile],
+ __typename: 'PackageFileConnection',
+ },
+ }),
+ );
+
+ createComponent({
+ resolver,
+ });
+
+ await waitForPromises();
+
+ const showDeleteFileSpy = jest.spyOn(wrapper.vm.$refs.deleteFileModal, 'show');
+ const showDeletePackageSpy = jest.spyOn(wrapper.vm.$refs.deleteModal, 'show');
+
+ findPackageFiles().vm.$emit('delete-files', [fileToDelete]);
+
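+ // only one file remains (and no further pages), so the package delete modal opens instead of the file modal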
+ expect(showDeletePackageSpy).toBeCalled();
+ expect(showDeleteFileSpy).not.toBeCalled();
+ });
+
+ it('confirming on the modal sets the loading state', async () => {
+ createComponent();
+
+ await waitForPromises();
+
+ findPackageFiles().vm.$emit('delete-files', [fileToDelete]);
+
+ findDeleteFileModal().vm.$emit('primary');
await nextTick();
- expect(findDeleteFileModal().exists()).toBe(true);
+ expect(findPackageFiles().props('isLoading')).toEqual(true);
});
it('confirming on the modal deletes the file and shows a success message', async () => {
@@ -344,7 +394,7 @@ describe('PackagesApp', () => {
describe('errors', () => {
it('shows an error when the mutation request fails', async () => {
- createComponent({ fileDeleteMutationResolver: jest.fn().mockRejectedValue() });
+ createComponent({ filesDeleteMutationResolver: jest.fn().mockRejectedValue() });
await waitForPromises();
await doDeleteFile();
@@ -358,9 +408,9 @@ describe('PackagesApp', () => {
it('shows an error when the mutation request returns an error payload', async () => {
createComponent({
- fileDeleteMutationResolver: jest
+ filesDeleteMutationResolver: jest
.fn()
- .mockResolvedValue(packageDestroyFileMutationError()),
+ .mockResolvedValue(packageDestroyFilesMutationError()),
});
await waitForPromises();
@@ -374,6 +424,117 @@ describe('PackagesApp', () => {
});
});
});
+
+ describe('deleting multiple files', () => {
+ const doDeleteFiles = async () => {
+ findPackageFiles().vm.$emit('delete-files', packageFiles());
+
+ findDeleteFilesModal().vm.$emit('primary');
+
+ return waitForPromises();
+ };
+
+ it('opens delete files confirmation modal', async () => {
+ createComponent();
+
+ await waitForPromises();
+
+ const showDeleteFilesSpy = jest.spyOn(wrapper.vm.$refs.deleteFilesModal, 'show');
+
+ findPackageFiles().vm.$emit('delete-files', packageFiles());
+
+ expect(showDeleteFilesSpy).toBeCalled();
+ });
+
+ it('confirming on the modal sets the loading state', async () => {
+ createComponent();
+
+ await waitForPromises();
+
+ findPackageFiles().vm.$emit('delete-files', packageFiles());
+
+ findDeleteFilesModal().vm.$emit('primary');
+
+ await nextTick();
+
+ expect(findPackageFiles().props('isLoading')).toEqual(true);
+ });
+
+ it('confirming on the modal deletes the files and shows a success message', async () => {
+ const resolver = jest.fn().mockResolvedValue(packageDetailsQuery());
+ createComponent({ resolver });
+
+ await waitForPromises();
+
+ await doDeleteFiles();
+
+ expect(createFlash).toHaveBeenCalledWith(
+ expect.objectContaining({
+ message: DELETE_PACKAGE_FILES_SUCCESS_MESSAGE,
+ }),
+ );
+ // we are re-fetching the package details, so we expect the resolver to have been called twice
+ expect(resolver).toHaveBeenCalledTimes(2);
+ });
+
+ describe('errors', () => {
+ it('shows an error when the mutation request fails', async () => {
+ createComponent({ filesDeleteMutationResolver: jest.fn().mockRejectedValue() });
+ await waitForPromises();
+
+ await doDeleteFiles();
+
+ expect(createFlash).toHaveBeenCalledWith(
+ expect.objectContaining({
+ message: DELETE_PACKAGE_FILES_ERROR_MESSAGE,
+ }),
+ );
+ });
+
+ it('shows an error when the mutation request returns an error payload', async () => {
+ createComponent({
+ filesDeleteMutationResolver: jest
+ .fn()
+ .mockResolvedValue(packageDestroyFilesMutationError()),
+ });
+ await waitForPromises();
+
+ await doDeleteFiles();
+
+ expect(createFlash).toHaveBeenCalledWith(
+ expect.objectContaining({
+ message: DELETE_PACKAGE_FILES_ERROR_MESSAGE,
+ }),
+ );
+ });
+ });
+ });
+
+ describe('deleting all files', () => {
+ it('opens the delete package confirmation modal', async () => {
+ const resolver = jest.fn().mockResolvedValue(
+ packageDetailsQuery({
+ packageFiles: {
+ pageInfo: {
+ hasNextPage: false,
+ },
+ nodes: packageFiles(),
+ },
+ }),
+ );
+ createComponent({
+ resolver,
+ });
+
+ await waitForPromises();
+
+ const showDeletePackageSpy = jest.spyOn(wrapper.vm.$refs.deleteModal, 'show');
+
+ findPackageFiles().vm.$emit('delete-files', packageFiles());
+
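+ // selecting every file (with no additional pages) opens the package delete confirmation modal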
+ expect(showDeletePackageSpy).toBeCalled();
+ });
+ });
});
describe('versions', () => {
diff --git a/spec/frontend/packages_and_registries/settings/project/settings/components/__snapshots__/container_expiration_policy_form_spec.js.snap b/spec/frontend/packages_and_registries/settings/project/settings/components/__snapshots__/container_expiration_policy_form_spec.js.snap
index 108d9478788..5d08574234c 100644
--- a/spec/frontend/packages_and_registries/settings/project/settings/components/__snapshots__/container_expiration_policy_form_spec.js.snap
+++ b/spec/frontend/packages_and_registries/settings/project/settings/components/__snapshots__/container_expiration_policy_form_spec.js.snap
@@ -5,6 +5,7 @@ exports[`Container Expiration Policy Settings Form Cadence matches snapshot 1`]
class="gl-mr-7 gl-mb-0!"
data-testid="cadence-dropdown"
description=""
+ dropdownclass=""
formoptions="[object Object],[object Object],[object Object],[object Object],[object Object]"
label="Run cleanup:"
name="cadence"
@@ -24,6 +25,7 @@ exports[`Container Expiration Policy Settings Form Keep N matches snapshot 1`] =
<expiration-dropdown-stub
data-testid="keep-n-dropdown"
description=""
+ dropdownclass=""
formoptions="[object Object],[object Object],[object Object],[object Object],[object Object],[object Object],[object Object]"
label="Keep the most recent:"
name="keep-n"
@@ -47,6 +49,7 @@ exports[`Container Expiration Policy Settings Form OlderThan matches snapshot 1`
<expiration-dropdown-stub
data-testid="older-than-dropdown"
description=""
+ dropdownclass=""
formoptions="[object Object],[object Object],[object Object],[object Object],[object Object],[object Object]"
label="Remove tags older than:"
name="older-than"
diff --git a/spec/frontend/packages_and_registries/settings/project/settings/mock_data.js b/spec/frontend/packages_and_registries/settings/project/settings/mock_data.js
index d4b6c66ddeb..0696144215c 100644
--- a/spec/frontend/packages_and_registries/settings/project/settings/mock_data.js
+++ b/spec/frontend/packages_and_registries/settings/project/settings/mock_data.js
@@ -1,4 +1,5 @@
export const containerExpirationPolicyData = () => ({
+ __typename: 'ContainerExpirationPolicy',
cadence: 'EVERY_DAY',
enabled: true,
keepN: 'TEN_TAGS',
@@ -13,7 +14,6 @@ export const expirationPolicyPayload = (override) => ({
project: {
id: '1',
containerExpirationPolicy: {
- __typename: 'ContainerExpirationPolicy',
...containerExpirationPolicyData(),
...override,
},
@@ -42,6 +42,7 @@ export const expirationPolicyMutationPayload = ({ override, errors = [] } = {})
});
export const packagesCleanupPolicyData = {
+ __typename: 'PackagesCleanupPolicy',
keepNDuplicatedPackageFiles: 'ALL_PACKAGE_FILES',
nextRunAt: '2020-11-19T07:37:03.941Z',
};
@@ -51,7 +52,6 @@ export const packagesCleanupPolicyPayload = (override) => ({
project: {
id: '1',
packagesCleanupPolicy: {
- __typename: 'PackagesCleanupPolicy',
...packagesCleanupPolicyData,
...override,
},
diff --git a/spec/frontend/pages/admin/application_settings/account_and_limits_spec.js b/spec/frontend/pages/admin/application_settings/account_and_limits_spec.js
index 542eb2f3ab8..85ed94b748d 100644
--- a/spec/frontend/pages/admin/application_settings/account_and_limits_spec.js
+++ b/spec/frontend/pages/admin/application_settings/account_and_limits_spec.js
@@ -23,17 +23,17 @@ describe('AccountAndLimits', () => {
describe('Changing of userInternalRegex when userDefaultExternal', () => {
it('is unchecked', () => {
- expect($userDefaultExternal.prop('checked')).toBeFalsy();
+ expect($userDefaultExternal.prop('checked')).toBe(false);
expect($userInternalRegex.placeholder).toEqual(PLACEHOLDER_USER_EXTERNAL_DEFAULT_FALSE);
- expect($userInternalRegex.readOnly).toBeTruthy();
+ expect($userInternalRegex.readOnly).toBe(true);
});
it('is checked', () => {
if (!$userDefaultExternal.prop('checked')) $userDefaultExternal.click();
- expect($userDefaultExternal.prop('checked')).toBeTruthy();
+ expect($userDefaultExternal.prop('checked')).toBe(true);
expect($userInternalRegex.placeholder).toEqual(PLACEHOLDER_USER_EXTERNAL_DEFAULT_TRUE);
- expect($userInternalRegex.readOnly).toBeFalsy();
+ expect($userInternalRegex.readOnly).toBe(false);
});
});
});
diff --git a/spec/frontend/pages/groups/new/components/app_spec.js b/spec/frontend/pages/groups/new/components/app_spec.js
new file mode 100644
index 00000000000..ab483316086
--- /dev/null
+++ b/spec/frontend/pages/groups/new/components/app_spec.js
@@ -0,0 +1,39 @@
+import { shallowMount } from '@vue/test-utils';
+import App from '~/pages/groups/new/components/app.vue';
+import NewNamespacePage from '~/vue_shared/new_namespace/new_namespace_page.vue';
+
+describe('App component', () => {
+ let wrapper;
+
+ const createComponent = (propsData = {}) => {
+ wrapper = shallowMount(App, { propsData });
+ };
+
+ const findNewNamespacePage = () => wrapper.findComponent(NewNamespacePage);
+
+ const findCreateGroupPanel = () =>
+ findNewNamespacePage()
+ .props('panels')
+ .find((panel) => panel.name === 'create-group-pane');
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('creates correct component for group creation', () => {
+ createComponent();
+
+ expect(findNewNamespacePage().props('initialBreadcrumb')).toBe('New group');
+ expect(findCreateGroupPanel().title).toBe('Create group');
+ });
+
+ it('creates correct component for subgroup creation', () => {
+ const props = { parentGroupName: 'parent', importExistingGroupPath: '/path' };
+
+ createComponent(props);
+
+ expect(findNewNamespacePage().props('initialBreadcrumb')).toBe('parent');
+ expect(findCreateGroupPanel().title).toBe('Create subgroup');
+ expect(findCreateGroupPanel().detailProps).toEqual(props);
+ });
+});
diff --git a/spec/frontend/pages/groups/new/components/create_group_description_details_spec.js b/spec/frontend/pages/groups/new/components/create_group_description_details_spec.js
new file mode 100644
index 00000000000..56a1fd03f71
--- /dev/null
+++ b/spec/frontend/pages/groups/new/components/create_group_description_details_spec.js
@@ -0,0 +1,57 @@
+import { shallowMount } from '@vue/test-utils';
+import { GlSprintf, GlLink } from '@gitlab/ui';
+import CreateGroupDescriptionDetails from '~/pages/groups/new/components/create_group_description_details.vue';
+import { helpPagePath } from '~/helpers/help_page_helper';
+
+describe('CreateGroupDescriptionDetails component', () => {
+ let wrapper;
+
+ const createComponent = (propsData = {}) => {
+ wrapper = shallowMount(CreateGroupDescriptionDetails, {
+ propsData,
+ stubs: { GlSprintf, GlLink },
+ });
+ };
+
+ const findLinkHref = (at) => wrapper.findAllComponents(GlLink).at(at);
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('creates correct component for group creation', () => {
+ createComponent();
+
+ const groupsLink = findLinkHref(0);
+ expect(groupsLink.attributes('href')).toBe(helpPagePath('user/group/index'));
+ expect(groupsLink.text()).toBe('Groups');
+
+ const subgroupsLink = findLinkHref(1);
+ expect(subgroupsLink.text()).toBe('subgroups');
+ expect(subgroupsLink.attributes('href')).toBe(helpPagePath('user/group/subgroups/index'));
+
+ expect(wrapper.text()).toBe(
+ 'Groups allow you to manage and collaborate across multiple projects. Members of a group have access to all of its projects. Groups can also be nested by creating subgroups.',
+ );
+ });
+
+ it('creates correct component for subgroup creation', () => {
+ createComponent({ parentGroupName: 'parent', importExistingGroupPath: '/path' });
+
+ const groupsLink = findLinkHref(0);
+ expect(groupsLink.attributes('href')).toBe(helpPagePath('user/group/index'));
+ expect(groupsLink.text()).toBe('Groups');
+
+ const subgroupsLink = findLinkHref(1);
+ expect(subgroupsLink.text()).toBe('subgroups');
+ expect(subgroupsLink.attributes('href')).toBe(helpPagePath('user/group/subgroups/index'));
+
+ const importGroupLink = findLinkHref(2);
+ expect(importGroupLink.text()).toBe('import an existing group');
+ expect(importGroupLink.attributes('href')).toBe('/path');
+
+ expect(wrapper.text()).toBe(
+ 'Groups and subgroups allow you to manage and collaborate across multiple projects. Members of a group have access to all of its projects. You can also import an existing group.',
+ );
+ });
+});
diff --git a/spec/frontend/pages/projects/graphs/__snapshots__/code_coverage_spec.js.snap b/spec/frontend/pages/projects/graphs/__snapshots__/code_coverage_spec.js.snap
index 43361bb6f24..21a38f066d9 100644
--- a/spec/frontend/pages/projects/graphs/__snapshots__/code_coverage_spec.js.snap
+++ b/spec/frontend/pages/projects/graphs/__snapshots__/code_coverage_spec.js.snap
@@ -3,6 +3,33 @@
exports[`Code Coverage when fetching data is successful matches the snapshot 1`] = `
<div>
<div
+ class="gl-display-flex gl-justify-content-space-between gl-align-items-center gl-border-t gl-pt-4 gl-mb-3"
+ >
+ <h4
+ class="gl-m-0"
+ sub-header=""
+ >
+ <gl-sprintf-stub
+ message="Code coverage statistics for %{ref} %{start_date} - %{end_date}"
+ />
+ </h4>
+
+ <gl-button-stub
+ buttontextclasses=""
+ category="primary"
+ data-testid="download-button"
+ href="url/"
+ icon=""
+ size="small"
+ variant="default"
+ >
+
+ Download raw data (.csv)
+
+ </gl-button-stub>
+ </div>
+
+ <div
class="gl-mt-3 gl-mb-3"
>
<!---->
@@ -79,6 +106,7 @@ exports[`Code Coverage when fetching data is successful matches the snapshot 1`]
legendmaxtext="Max"
legendmintext="Min"
option="[object Object]"
+ responsive=""
thresholds=""
/>
</div>
diff --git a/spec/frontend/pages/projects/graphs/code_coverage_spec.js b/spec/frontend/pages/projects/graphs/code_coverage_spec.js
index 0f763e3220a..f272891919d 100644
--- a/spec/frontend/pages/projects/graphs/code_coverage_spec.js
+++ b/spec/frontend/pages/projects/graphs/code_coverage_spec.js
@@ -15,17 +15,26 @@ describe('Code Coverage', () => {
let mockAxios;
const graphEndpoint = '/graph';
+ const graphStartDate = '13 February';
+ const graphEndDate = '12 May';
+ const graphRef = 'master';
+ const graphCsvPath = 'url/';
const findAlert = () => wrapper.find(GlAlert);
const findAreaChart = () => wrapper.find(GlAreaChart);
const findAllDropdownItems = () => wrapper.findAll(GlDropdownItem);
const findFirstDropdownItem = () => findAllDropdownItems().at(0);
const findSecondDropdownItem = () => findAllDropdownItems().at(1);
+ const findDownloadButton = () => wrapper.find('[data-testid="download-button"]');
const createComponent = () => {
wrapper = shallowMount(CodeCoverage, {
propsData: {
graphEndpoint,
+ graphStartDate,
+ graphEndDate,
+ graphRef,
+ graphCsvPath,
},
});
};
@@ -64,6 +73,10 @@ describe('Code Coverage', () => {
it('shows no error messages', () => {
expect(findAlert().exists()).toBe(false);
});
+
+ it('renders the download button', () => {
+ expect(findDownloadButton().exists()).toBe(true);
+ });
});
describe('when fetching data fails', () => {
@@ -112,6 +125,10 @@ describe('Code Coverage', () => {
it('still renders an empty graph', () => {
expect(findAreaChart().exists()).toBe(true);
});
+
+ it('does not render download button', () => {
+ expect(findDownloadButton().exists()).toBe(false);
+ });
});
describe('dropdown options', () => {
@@ -146,8 +163,8 @@ describe('Code Coverage', () => {
await nextTick();
- expect(findFirstDropdownItem().attributes('ischecked')).toBeFalsy();
- expect(findSecondDropdownItem().attributes('ischecked')).toBeTruthy();
+ expect(findFirstDropdownItem().attributes('ischecked')).toBe(undefined);
+ expect(findSecondDropdownItem().attributes('ischecked')).toBe('true');
});
it('updates the graph data when selecting a different option in dropdown', async () => {
diff --git a/spec/frontend/pages/projects/pipeline_schedules/shared/components/timezone_dropdown_spec.js b/spec/frontend/pages/projects/pipeline_schedules/shared/components/timezone_dropdown_spec.js
index 42eeff89bf4..5b9c48f0d9b 100644
--- a/spec/frontend/pages/projects/pipeline_schedules/shared/components/timezone_dropdown_spec.js
+++ b/spec/frontend/pages/projects/pipeline_schedules/shared/components/timezone_dropdown_spec.js
@@ -226,7 +226,6 @@ describe('Timezone Dropdown', () => {
it('returns the correct object if the identifier exists', () => {
const res = findTimezoneByIdentifier(tzList, identifier);
- expect(res).toBeTruthy();
expect(res).toBe(tzList[2]);
});
diff --git a/spec/frontend/pages/projects/shared/permissions/components/settings_panel_spec.js b/spec/frontend/pages/projects/shared/permissions/components/settings_panel_spec.js
index 85660d09baa..f908508c4b5 100644
--- a/spec/frontend/pages/projects/shared/permissions/components/settings_panel_spec.js
+++ b/spec/frontend/pages/projects/shared/permissions/components/settings_panel_spec.js
@@ -127,6 +127,7 @@ describe('Settings Panel', () => {
const findOperationsVisibilityInput = () =>
findOperationsSettings().findComponent(ProjectFeatureSetting);
const findConfirmDangerButton = () => wrapper.findComponent(ConfirmDanger);
+ const findEnvironmentsSettings = () => wrapper.findComponent({ ref: 'environments-settings' });
afterEach(() => {
wrapper.destroy();
@@ -786,4 +787,23 @@ describe('Settings Panel', () => {
expect(findOperationsSettings().exists()).toBe(true);
});
});
+
+ describe('Environments', () => {
+ describe('with feature flag', () => {
+ it('should show the environments toggle', () => {
+ wrapper = mountComponent({
+ glFeatures: { splitOperationsVisibilityPermissions: true },
+ });
+
+ expect(findEnvironmentsSettings().exists()).toBe(true);
+ });
+ });
+ describe('without feature flag', () => {
+ it('should not show the environments toggle', () => {
+ wrapper = mountComponent({});
+
+ expect(findEnvironmentsSettings().exists()).toBe(false);
+ });
+ });
+ });
});
diff --git a/spec/frontend/pages/shared/wikis/components/wiki_form_spec.js b/spec/frontend/pages/shared/wikis/components/wiki_form_spec.js
index a5db10d106d..204c48f8de1 100644
--- a/spec/frontend/pages/shared/wikis/components/wiki_form_spec.js
+++ b/spec/frontend/pages/shared/wikis/components/wiki_form_spec.js
@@ -1,5 +1,5 @@
import { nextTick } from 'vue';
-import { GlAlert, GlButton, GlFormInput, GlFormGroup } from '@gitlab/ui';
+import { GlAlert, GlButton, GlFormInput, GlFormGroup, GlSegmentedControl } from '@gitlab/ui';
import { mount, shallowMount } from '@vue/test-utils';
import axios from 'axios';
import MockAdapter from 'axios-mock-adapter';
@@ -106,6 +106,7 @@ describe('WikiForm', () => {
MarkdownField,
GlAlert,
GlButton,
+ GlSegmentedControl,
LocalStorageSync: stubComponent(LocalStorageSync),
GlFormInput,
GlFormGroup,
@@ -317,20 +318,20 @@ describe('WikiForm', () => {
});
describe('when content editor is not active', () => {
- it('displays "Edit rich text" label in the toggle editing mode button', () => {
- expect(findToggleEditingModeButton().text()).toBe('Edit rich text');
+ it('displays "Source" label in the toggle editing mode button', () => {
+ expect(findToggleEditingModeButton().props().checked).toBe('source');
});
describe('when clicking the toggle editing mode button', () => {
beforeEach(async () => {
- await findToggleEditingModeButton().trigger('click');
+ await findToggleEditingModeButton().vm.$emit('input', 'richText');
});
it('hides the classic editor', () => {
expect(findClassicEditor().exists()).toBe(false);
});
- it('hides the content editor', () => {
+ it('shows the content editor', () => {
expect(findContentEditor().exists()).toBe(true);
});
});
@@ -342,7 +343,7 @@ describe('WikiForm', () => {
expect(findContentEditor().exists()).toBe(false);
// enable content editor
- await findLocalStorageSync().vm.$emit('input', true);
+ await findLocalStorageSync().vm.$emit('input', 'richText');
expect(findContentEditor().exists()).toBe(true);
expect(findClassicEditor().exists()).toBe(false);
@@ -352,17 +353,18 @@ describe('WikiForm', () => {
describe('when content editor is active', () => {
let mockContentEditor;
- beforeEach(async () => {
+ beforeEach(() => {
+ createWrapper();
mockContentEditor = {
getSerializedContent: jest.fn(),
setSerializedContent: jest.fn(),
};
- await findToggleEditingModeButton().trigger('click');
+ findToggleEditingModeButton().vm.$emit('input', 'richText');
});
- it('displays "Edit source" label in the toggle editing mode button', () => {
- expect(findToggleEditingModeButton().text()).toBe('Edit source');
+ it('displays "Rich text" label in the toggle editing mode button', () => {
+ expect(findToggleEditingModeButton().props().checked).toBe('richText');
});
describe('when clicking the toggle editing mode button', () => {
@@ -374,7 +376,8 @@ describe('WikiForm', () => {
);
findContentEditor().vm.$emit('initialized', mockContentEditor);
- await findToggleEditingModeButton().trigger('click');
+ await findToggleEditingModeButton().vm.$emit('input', 'source');
+ await nextTick();
});
it('hides the content editor', () => {
@@ -389,6 +392,38 @@ describe('WikiForm', () => {
expect(findContent().element.value).toBe(contentEditorFakeSerializedContent);
});
});
+
+ describe('when content editor is loading', () => {
+ beforeEach(async () => {
+ findContentEditor().vm.$emit('loading');
+
+ await nextTick();
+ });
+
+ it('disables toggle editing mode button', () => {
+ expect(findToggleEditingModeButton().attributes().disabled).toBe('true');
+ });
+
+ describe('when content editor loads successfully', () => {
+ it('enables toggle editing mode button', async () => {
+ findContentEditor().vm.$emit('loadingSuccess');
+
+ await nextTick();
+
+ expect(findToggleEditingModeButton().attributes().disabled).not.toBeDefined();
+ });
+ });
+
+ describe('when content editor fails to load', () => {
+ it('enables toggle editing mode button', async () => {
+ findContentEditor().vm.$emit('loadingError');
+
+ await nextTick();
+
+ expect(findToggleEditingModeButton().attributes().disabled).not.toBeDefined();
+ });
+ });
+ });
});
});
@@ -398,7 +433,7 @@ describe('WikiForm', () => {
createWrapper({ mountFn: mount });
mock.onPost(/preview-markdown/).reply(400);
- await findToggleEditingModeButton().trigger('click');
+ await findToggleEditingModeButton().vm.$emit('input', 'richText');
// try waiting for content editor to load (but it will never actually load)
await waitForPromises();
@@ -410,7 +445,7 @@ describe('WikiForm', () => {
describe('toggling editing modes to the classic editor', () => {
beforeEach(() => {
- return findToggleEditingModeButton().trigger('click');
+ return findToggleEditingModeButton().vm.$emit('input', 'source');
});
it('switches to classic editor', () => {
@@ -426,7 +461,7 @@ describe('WikiForm', () => {
mock.onPost(/preview-markdown/).reply(200, { body: '<p>hello <strong>world</strong></p>' });
- await findToggleEditingModeButton().trigger('click');
+ await findToggleEditingModeButton().vm.$emit('input', 'richText');
await waitForPromises();
});
@@ -463,7 +498,6 @@ describe('WikiForm', () => {
it('triggers tracking events on form submit', async () => {
await triggerFormSubmit();
-
expect(trackingSpy).toHaveBeenCalledWith(undefined, SAVED_USING_CONTENT_EDITOR_ACTION, {
label: WIKI_CONTENT_EDITOR_TRACKING_LABEL,
});
diff --git a/spec/frontend/pipeline_editor/components/drawer/cards/first_pipeline_card_spec.js b/spec/frontend/pipeline_editor/components/drawer/cards/first_pipeline_card_spec.js
index bf5d15516c2..7e1e5004d91 100644
--- a/spec/frontend/pipeline_editor/components/drawer/cards/first_pipeline_card_spec.js
+++ b/spec/frontend/pipeline_editor/components/drawer/cards/first_pipeline_card_spec.js
@@ -8,16 +8,8 @@ describe('First pipeline card', () => {
let wrapper;
let trackingSpy;
- const defaultProvide = {
- runnerHelpPagePath: '/help/runners',
- };
-
const createComponent = () => {
- wrapper = mount(FirstPipelineCard, {
- provide: {
- ...defaultProvide,
- },
- });
+ wrapper = mount(FirstPipelineCard);
};
const getLinkByName = (name) => getByRole(wrapper.element, 'link', { name });
@@ -43,7 +35,7 @@ describe('First pipeline card', () => {
});
it('renders the link', () => {
- expect(findRunnersLink().href).toContain(defaultProvide.runnerHelpPagePath);
+ expect(findRunnersLink().href).toBe(wrapper.vm.$options.RUNNER_HELP_URL);
});
describe('tracking', () => {
diff --git a/spec/frontend/pipeline_editor/components/lint/ci_lint_spec.js b/spec/frontend/pipeline_editor/components/lint/ci_lint_spec.js
deleted file mode 100644
index 238942a34ff..00000000000
--- a/spec/frontend/pipeline_editor/components/lint/ci_lint_spec.js
+++ /dev/null
@@ -1,72 +0,0 @@
-import { GlAlert, GlLink } from '@gitlab/ui';
-import { mount, shallowMount } from '@vue/test-utils';
-import CiLint from '~/pipeline_editor/components/lint/ci_lint.vue';
-import { mergeUnwrappedCiConfig, mockLintHelpPagePath } from '../../mock_data';
-
-describe('~/pipeline_editor/components/lint/ci_lint.vue', () => {
- let wrapper;
-
- const createComponent = ({ props, mountFn = shallowMount } = {}) => {
- wrapper = mountFn(CiLint, {
- provide: {
- lintHelpPagePath: mockLintHelpPagePath,
- },
- propsData: {
- ciConfig: mergeUnwrappedCiConfig(),
- ...props,
- },
- });
- };
-
- const findAllByTestId = (selector) => wrapper.findAll(`[data-testid="${selector}"]`);
- const findAlert = () => wrapper.find(GlAlert);
- const findLintParameters = () => findAllByTestId('ci-lint-parameter');
- const findLintParameterAt = (i) => findLintParameters().at(i);
- const findLintValueAt = (i) => findAllByTestId('ci-lint-value').at(i);
-
- afterEach(() => {
- wrapper.destroy();
- });
-
- describe('Valid Results', () => {
- beforeEach(() => {
- createComponent({ props: { isValid: true }, mountFn: mount });
- });
-
- it('displays valid results', () => {
- expect(findAlert().text()).toMatch('Status: Syntax is correct.');
- });
-
- it('displays link to the right help page', () => {
- expect(findAlert().find(GlLink).attributes('href')).toBe(mockLintHelpPagePath);
- });
-
- it('displays jobs', () => {
- expect(findLintParameters()).toHaveLength(3);
-
- expect(findLintParameterAt(0).text()).toBe('Test Job - job_test_1');
- expect(findLintParameterAt(1).text()).toBe('Test Job - job_test_2');
- expect(findLintParameterAt(2).text()).toBe('Build Job - job_build');
- });
-
- it('displays jobs details', () => {
- expect(findLintParameters()).toHaveLength(3);
-
- expect(findLintValueAt(0).text()).toMatchInterpolatedText(
- 'echo "test 1" Only policy: branches, tags When: on_success',
- );
- expect(findLintValueAt(1).text()).toMatchInterpolatedText(
- 'echo "test 2" Only policy: branches, tags When: on_success',
- );
- expect(findLintValueAt(2).text()).toMatchInterpolatedText(
- 'echo "build" Only policy: branches, tags When: on_success',
- );
- });
-
- it('displays invalid results', () => {
- createComponent({ props: { isValid: false }, mountFn: mount });
-
- expect(findAlert().text()).toMatch('Status: Syntax is incorrect.');
- });
- });
-});
diff --git a/spec/frontend/pipeline_editor/components/pipeline_editor_tabs_spec.js b/spec/frontend/pipeline_editor/components/pipeline_editor_tabs_spec.js
index 87a7f07f7d4..2f3e1b49b37 100644
--- a/spec/frontend/pipeline_editor/components/pipeline_editor_tabs_spec.js
+++ b/spec/frontend/pipeline_editor/components/pipeline_editor_tabs_spec.js
@@ -1,11 +1,12 @@
import { GlAlert, GlBadge, GlLoadingIcon, GlTabs } from '@gitlab/ui';
-import { createLocalVue, mount, shallowMount } from '@vue/test-utils';
+import { mount, shallowMount } from '@vue/test-utils';
import VueApollo from 'vue-apollo';
import Vue, { nextTick } from 'vue';
import createMockApollo from 'helpers/mock_apollo_helper';
import setWindowLocation from 'helpers/set_window_location_helper';
import CiConfigMergedPreview from '~/pipeline_editor/components/editor/ci_config_merged_preview.vue';
-import CiLint from '~/pipeline_editor/components/lint/ci_lint.vue';
import CiValidate from '~/pipeline_editor/components/validate/ci_validate.vue';
import WalkthroughPopover from '~/pipeline_editor/components/popovers/walkthrough_popover.vue';
import PipelineEditorTabs from '~/pipeline_editor/components/pipeline_editor_tabs.vue';
@@ -30,8 +31,7 @@ import {
mockLintResponseWithoutMerged,
} from '../mock_data';
-const localVue = createLocalVue();
-localVue.use(VueApollo);
+Vue.use(VueApollo);
Vue.config.ignoredElements = ['gl-emoji'];
@@ -64,7 +64,12 @@ describe('Pipeline editor tabs component', () => {
};
},
provide: {
+ ciConfigPath: '/path/to/ci-config',
ciLintPath: mockCiLintPath,
+ currentBranch: 'main',
+ projectFullPath: '/path/to/project',
+ simulatePipelineHelpPagePath: 'path/to/help/page',
+ validateTabIllustrationPath: 'path/to/svg',
...provide,
},
stubs: {
@@ -88,21 +93,18 @@ describe('Pipeline editor tabs component', () => {
provide,
mountFn,
options: {
- localVue,
apolloProvider: mockApollo,
},
});
};
const findEditorTab = () => wrapper.find('[data-testid="editor-tab"]');
- const findLintTab = () => wrapper.find('[data-testid="lint-tab"]');
const findMergedTab = () => wrapper.find('[data-testid="merged-tab"]');
const findValidateTab = () => wrapper.find('[data-testid="validate-tab"]');
const findVisualizationTab = () => wrapper.find('[data-testid="visualization-tab"]');
const findAlert = () => wrapper.findComponent(GlAlert);
const findBadge = () => wrapper.findComponent(GlBadge);
- const findCiLint = () => wrapper.findComponent(CiLint);
const findCiValidate = () => wrapper.findComponent(CiValidate);
const findGlTabs = () => wrapper.findComponent(GlTabs);
const findLoadingIcon = () => wrapper.findComponent(GlLoadingIcon);
@@ -121,7 +123,8 @@ describe('Pipeline editor tabs component', () => {
describe('editor tab', () => {
it('displays editor only after the tab is mounted', async () => {
- createComponent({ mountFn: mount });
+ mockBlobContentData.mockResolvedValue(mockBlobContentQueryResponse);
+ createComponentWithApollo({ mountFn: mount });
expect(findTextEditor().exists()).toBe(false);
@@ -156,138 +159,57 @@ describe('Pipeline editor tabs component', () => {
});
describe('validate tab', () => {
- describe('with simulatePipeline feature flag ON', () => {
- describe('after loading', () => {
- beforeEach(() => {
- createComponent({
- provide: { glFeatures: { simulatePipeline: true } },
- });
- });
-
- it('displays the tab and the validate component', () => {
- expect(findValidateTab().exists()).toBe(true);
- expect(findCiValidate().exists()).toBe(true);
- });
+ describe('after loading', () => {
+ beforeEach(() => {
+ createComponent();
});
- describe('NEW badge', () => {
- describe('default', () => {
- beforeEach(() => {
- mockBlobContentData.mockResolvedValue(mockBlobContentQueryResponse);
- createComponentWithApollo({
- mountFn: mount,
- props: {
- currentTab: VALIDATE_TAB,
- },
- provide: {
- glFeatures: { simulatePipeline: true },
- ciConfigPath: '/path/to/ci-config',
- currentBranch: 'main',
- projectFullPath: '/path/to/project',
- simulatePipelineHelpPagePath: 'path/to/help/page',
- validateTabIllustrationPath: 'path/to/svg',
- },
- });
- });
-
- it('renders badge by default', () => {
- expect(findBadge().exists()).toBe(true);
- expect(findBadge().text()).toBe(wrapper.vm.$options.i18n.new);
- });
-
- it('hides badge when moving away from the validate tab', async () => {
- expect(findBadge().exists()).toBe(true);
-
- await findEditorTab().vm.$emit('click');
-
- expect(findBadge().exists()).toBe(false);
- });
- });
-
- describe('if badge has been dismissed before', () => {
- beforeEach(() => {
- localStorage.setItem(VALIDATE_TAB_BADGE_DISMISSED_KEY, 'true');
- mockBlobContentData.mockResolvedValue(mockBlobContentQueryResponse);
- createComponentWithApollo({
- mountFn: mount,
- provide: {
- glFeatures: { simulatePipeline: true },
- ciConfigPath: '/path/to/ci-config',
- currentBranch: 'main',
- projectFullPath: '/path/to/project',
- simulatePipelineHelpPagePath: 'path/to/help/page',
- validateTabIllustrationPath: 'path/to/svg',
- },
- });
- });
-
- it('does not render badge if it has been dismissed before', () => {
- expect(findBadge().exists()).toBe(false);
- });
- });
+ it('displays the tab and the validate component', () => {
+ expect(findValidateTab().exists()).toBe(true);
+ expect(findCiValidate().exists()).toBe(true);
});
});
- describe('with simulatePipeline feature flag OFF', () => {
- beforeEach(() => {
- createComponent({
- provide: {
- glFeatures: {
- simulatePipeline: false,
+ describe('NEW badge', () => {
+ describe('default', () => {
+ beforeEach(() => {
+ mockBlobContentData.mockResolvedValue(mockBlobContentQueryResponse);
+ createComponentWithApollo({
+ mountFn: mount,
+ props: {
+ currentTab: VALIDATE_TAB,
},
- },
+ });
});
- });
- it('does not render the tab and the validate component', () => {
- expect(findValidateTab().exists()).toBe(false);
- expect(findCiValidate().exists()).toBe(false);
- });
- });
- });
+ it('renders badge by default', () => {
+ expect(findBadge().exists()).toBe(true);
+ expect(findBadge().text()).toBe(wrapper.vm.$options.i18n.new);
+ });
- describe('lint tab', () => {
- describe('while loading', () => {
- beforeEach(() => {
- createComponent({ appStatus: EDITOR_APP_STATUS_LOADING });
- });
+ it('hides badge when moving away from the validate tab', async () => {
+ expect(findBadge().exists()).toBe(true);
- it('displays a loading icon if the lint query is loading', () => {
- expect(findLoadingIcon().exists()).toBe(true);
- });
+ await findEditorTab().vm.$emit('click');
- it('does not display the lint component', () => {
- expect(findCiLint().exists()).toBe(false);
- });
- });
- describe('after loading', () => {
- beforeEach(() => {
- createComponent();
- });
-
- it('display the tab and the lint component', () => {
- expect(findLintTab().exists()).toBe(true);
- expect(findCiLint().exists()).toBe(true);
+ expect(findBadge().exists()).toBe(false);
+ });
});
- });
- describe('with simulatePipeline feature flag ON', () => {
- beforeEach(() => {
- createComponent({
- provide: {
- glFeatures: {
- simulatePipeline: true,
- },
- },
+ describe('if badge has been dismissed before', () => {
+ beforeEach(() => {
+ localStorage.setItem(VALIDATE_TAB_BADGE_DISMISSED_KEY, 'true');
+ mockBlobContentData.mockResolvedValue(mockBlobContentQueryResponse);
+ createComponentWithApollo({ mountFn: mount });
});
- });
- it('does not render the tab and the lint component', () => {
- expect(findLintTab().exists()).toBe(false);
- expect(findCiLint().exists()).toBe(false);
+ it('does not render badge if it has been dismissed before', () => {
+ expect(findBadge().exists()).toBe(false);
+ });
});
});
});
+
describe('merged tab', () => {
describe('while loading', () => {
beforeEach(() => {
@@ -328,19 +250,19 @@ describe('Pipeline editor tabs component', () => {
describe('show tab content based on status', () => {
it.each`
- appStatus | editor | viz | lint | merged
+ appStatus | editor | viz | validate | merged
${undefined} | ${true} | ${true} | ${true} | ${true}
- ${EDITOR_APP_STATUS_EMPTY} | ${true} | ${false} | ${false} | ${false}
+ ${EDITOR_APP_STATUS_EMPTY} | ${true} | ${false} | ${true} | ${false}
${EDITOR_APP_STATUS_INVALID} | ${true} | ${false} | ${true} | ${false}
${EDITOR_APP_STATUS_VALID} | ${true} | ${true} | ${true} | ${true}
`(
- 'when status is $appStatus, we show - editor:$editor | viz:$viz | lint:$lint | merged:$merged ',
- ({ appStatus, editor, viz, lint, merged }) => {
+ 'when status is $appStatus, we show - editor:$editor | viz:$viz | validate:$validate | merged:$merged ',
+ ({ appStatus, editor, viz, validate, merged }) => {
createComponent({ appStatus });
expect(findTextEditor().exists()).toBe(editor);
expect(findPipelineGraph().exists()).toBe(viz);
- expect(findCiLint().exists()).toBe(lint);
+ expect(findValidateTab().exists()).toBe(validate);
expect(findMergedPreview().exists()).toBe(merged);
},
);
@@ -386,11 +308,8 @@ describe('Pipeline editor tabs component', () => {
describe('pipeline editor walkthrough', () => {
describe('when isNewCiConfigFile prop is true (default)', () => {
- beforeEach(async () => {
- createComponent({
- mountFn: mount,
- });
- await nextTick();
+ beforeEach(() => {
+ createComponent();
});
it('shows walkthrough popover', async () => {
@@ -400,8 +319,7 @@ describe('Pipeline editor tabs component', () => {
describe('when isNewCiConfigFile prop is false', () => {
it('does not show walkthrough popover', async () => {
- createComponent({ props: { isNewCiConfigFile: false }, mountFn: mount });
- await nextTick();
+ createComponent({ props: { isNewCiConfigFile: false } });
expect(findWalkthroughPopover().exists()).toBe(false);
});
});
@@ -411,7 +329,6 @@ describe('Pipeline editor tabs component', () => {
const handler = jest.fn();
createComponent({
- mountFn: mount,
listeners: {
event: handler,
},
diff --git a/spec/frontend/pipeline_editor/components/validate/ci_validate_spec.js b/spec/frontend/pipeline_editor/components/validate/ci_validate_spec.js
index f5f01b675b2..09d4f9736ad 100644
--- a/spec/frontend/pipeline_editor/components/validate/ci_validate_spec.js
+++ b/spec/frontend/pipeline_editor/components/validate/ci_validate_spec.js
@@ -2,6 +2,7 @@ import { GlAlert, GlDropdown, GlIcon, GlLoadingIcon, GlPopover } from '@gitlab/u
import { nextTick } from 'vue';
import { createLocalVue } from '@vue/test-utils';
import VueApollo from 'vue-apollo';
+import { mockTracking, unmockTracking } from 'helpers/tracking_helper';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import createMockApollo from 'helpers/mock_apollo_helper';
import CiLintResults from '~/pipeline_editor/components/lint/ci_lint_results.vue';
@@ -9,6 +10,7 @@ import CiValidate, { i18n } from '~/pipeline_editor/components/validate/ci_valid
import ValidatePipelinePopover from '~/pipeline_editor/components/popovers/validate_pipeline_popover.vue';
import getBlobContent from '~/pipeline_editor/graphql/queries/blob_content.query.graphql';
import lintCIMutation from '~/pipeline_editor/graphql/mutations/client/lint_ci.mutation.graphql';
+import { pipelineEditorTrackingOptions } from '~/pipeline_editor/constants';
import {
mockBlobContentQueryResponse,
mockCiLintPath,
@@ -24,6 +26,7 @@ describe('Pipeline Editor Validate Tab', () => {
let wrapper;
let mockApollo;
let mockBlobContentData;
+ let trackingSpy;
const createComponent = ({
props,
@@ -140,9 +143,24 @@ describe('Pipeline Editor Validate Tab', () => {
mockBlobContentData.mockResolvedValue(mockBlobContentQueryResponse);
await createComponentWithApollo();
+ trackingSpy = mockTracking(undefined, wrapper.element, jest.spyOn);
jest.spyOn(wrapper.vm.$apollo, 'mutate').mockResolvedValue(mockLintDataValid);
});
+ afterEach(() => {
+ unmockTracking();
+ });
+
+ it('tracks the simulation event', () => {
+ const {
+ label,
+ actions: { simulatePipeline },
+ } = pipelineEditorTrackingOptions;
+ findCta().vm.$emit('click');
+
+ expect(trackingSpy).toHaveBeenCalledWith(undefined, simulatePipeline, { label });
+ });
+
it('renders loading state while simulation is ongoing', async () => {
findCta().vm.$emit('click');
await nextTick();
@@ -159,7 +177,7 @@ describe('Pipeline Editor Validate Tab', () => {
expect(wrapper.vm.$apollo.mutate).toHaveBeenCalledWith({
mutation: lintCIMutation,
variables: {
- dry_run: true,
+ dry: true,
content: mockCiYml,
endpoint: mockCiLintPath,
},
@@ -224,10 +242,27 @@ describe('Pipeline Editor Validate Tab', () => {
mockBlobContentData.mockResolvedValue(mockBlobContentQueryResponse);
await createComponentWithApollo();
+ trackingSpy = mockTracking(undefined, wrapper.element, jest.spyOn);
jest.spyOn(wrapper.vm.$apollo, 'mutate').mockResolvedValue(mockLintDataValid);
await findCta().vm.$emit('click');
});
+ afterEach(() => {
+ unmockTracking();
+ });
+
+ it('tracks the second simulation event', async () => {
+ const {
+ label,
+ actions: { resimulatePipeline },
+ } = pipelineEditorTrackingOptions;
+
+ await wrapper.setProps({ ciFileContent: 'new yaml content' });
+ findResultsCta().vm.$emit('click');
+
+ expect(trackingSpy).toHaveBeenCalledWith(undefined, resimulatePipeline, { label });
+ });
+
it('renders content change status', async () => {
await wrapper.setProps({ ciFileContent: 'new yaml content' });
@@ -243,7 +278,7 @@ describe('Pipeline Editor Validate Tab', () => {
expect(wrapper.vm.$apollo.mutate).toHaveBeenCalledWith({
mutation: lintCIMutation,
variables: {
- dry_run: true,
+ dry: true,
content: 'new yaml content',
endpoint: mockCiLintPath,
},
diff --git a/spec/frontend/pipeline_editor/pipeline_editor_home_spec.js b/spec/frontend/pipeline_editor/pipeline_editor_home_spec.js
index c6964f190b4..0cb7155c8c0 100644
--- a/spec/frontend/pipeline_editor/pipeline_editor_home_spec.js
+++ b/spec/frontend/pipeline_editor/pipeline_editor_home_spec.js
@@ -14,7 +14,7 @@ import PipelineEditorTabs from '~/pipeline_editor/components/pipeline_editor_tab
import {
CREATE_TAB,
FILE_TREE_DISPLAY_KEY,
- LINT_TAB,
+ VALIDATE_TAB,
MERGED_TAB,
TABS_INDEX,
VISUALIZE_TAB,
@@ -138,7 +138,7 @@ describe('Pipeline editor home wrapper', () => {
tab | shouldShow
${MERGED_TAB} | ${false}
${VISUALIZE_TAB} | ${false}
- ${LINT_TAB} | ${false}
+ ${VALIDATE_TAB} | ${false}
${CREATE_TAB} | ${true}
`(
'when the active tab is $tab the commit form is shown: $shouldShow',
@@ -170,7 +170,7 @@ describe('Pipeline editor home wrapper', () => {
tab | shouldShow
${MERGED_TAB} | ${false}
${VISUALIZE_TAB} | ${false}
- ${LINT_TAB} | ${false}
+ ${VALIDATE_TAB} | ${false}
${CREATE_TAB} | ${true}
`(
'when the tab query param is $tab the commit form is shown: $shouldShow',
diff --git a/spec/frontend/pipeline_new/components/pipeline_new_form_spec.js b/spec/frontend/pipeline_new/components/pipeline_new_form_spec.js
index eec55091efa..18dbd1ce9d6 100644
--- a/spec/frontend/pipeline_new/components/pipeline_new_form_spec.js
+++ b/spec/frontend/pipeline_new/components/pipeline_new_form_spec.js
@@ -39,6 +39,7 @@ describe('Pipeline New Form', () => {
const findSubmitButton = () => wrapper.find('[data-testid="run_pipeline_button"]');
const findVariableRows = () => wrapper.findAll('[data-testid="ci-variable-row"]');
const findRemoveIcons = () => wrapper.findAll('[data-testid="remove-ci-variable-row"]');
+ const findDropdowns = () => wrapper.findAll('[data-testid="pipeline-form-ci-variable-type"]');
const findKeyInputs = () => wrapper.findAll('[data-testid="pipeline-form-ci-variable-key"]');
const findValueInputs = () => wrapper.findAll('[data-testid="pipeline-form-ci-variable-value"]');
const findErrorAlert = () => wrapper.find('[data-testid="run-pipeline-error-alert"]');
@@ -102,6 +103,8 @@ describe('Pipeline New Form', () => {
});
it('displays the correct values for the provided query params', async () => {
+ expect(findDropdowns().at(0).props('text')).toBe('Variable');
+ expect(findDropdowns().at(1).props('text')).toBe('File');
expect(findRefsDropdown().props('value')).toEqual({ shortName: 'tag-1' });
expect(findVariableRows()).toHaveLength(3);
});
@@ -114,6 +117,7 @@ describe('Pipeline New Form', () => {
it('displays an empty variable for the user to fill out', async () => {
expect(findKeyInputs().at(2).element.value).toBe('');
expect(findValueInputs().at(2).element.value).toBe('');
+ expect(findDropdowns().at(2).props('text')).toBe('Variable');
});
it('does not display remove icon for last row', () => {
diff --git a/spec/frontend/pipeline_schedules/components/take_ownership_modal_spec.js b/spec/frontend/pipeline_schedules/components/take_ownership_modal_spec.js
new file mode 100644
index 00000000000..d787611fe8f
--- /dev/null
+++ b/spec/frontend/pipeline_schedules/components/take_ownership_modal_spec.js
@@ -0,0 +1,54 @@
+import { GlModal } from '@gitlab/ui';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import TakeOwnershipModal from '~/pipeline_schedules/components/take_ownership_modal.vue';
+
+describe('Take ownership modal', () => {
+ let wrapper;
+ const url = `/root/job-log-tester/-/pipeline_schedules/3/take_ownership`;
+
+ const createComponent = (props = {}) => {
+ wrapper = shallowMountExtended(TakeOwnershipModal, {
+ propsData: {
+ ownershipUrl: url,
+ ...props,
+ },
+ });
+ };
+
+ const findModal = () => wrapper.findComponent(GlModal);
+
+ beforeEach(() => {
+ createComponent();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('has a primary action set to a url and a post data-method', () => {
+ const actionPrimary = findModal().props('actionPrimary');
+
+ expect(actionPrimary.attributes).toEqual(
+ expect.objectContaining([
+ {
+ category: 'primary',
+ variant: 'confirm',
+ href: url,
+ 'data-method': 'post',
+ },
+ ]),
+ );
+ });
+
+ it('shows a take ownership message', () => {
+ expect(findModal().text()).toBe(
+ 'Only the owner of a pipeline schedule can make changes to it. Do you want to take ownership of this schedule?',
+ );
+ });
+
+ it('emits the cancel event when clicking on cancel', async () => {
+ findModal().vm.$emit('cancel');
+
+ expect(findModal().emitted('cancel')).toHaveLength(1);
+ });
+});
diff --git a/spec/frontend/pipeline_wizard/components/editor_spec.js b/spec/frontend/pipeline_wizard/components/editor_spec.js
index 446412a4f02..540a08d2c7f 100644
--- a/spec/frontend/pipeline_wizard/components/editor_spec.js
+++ b/spec/frontend/pipeline_wizard/components/editor_spec.js
@@ -42,7 +42,7 @@ describe('Pages Yaml Editor wrapper', () => {
it('does not cause the touch event to be emitted', () => {
wrapper.setProps({ doc });
- expect(wrapper.emitted('touch')).not.toBeTruthy();
+ expect(wrapper.emitted('touch')).toBeUndefined();
});
});
@@ -63,7 +63,7 @@ describe('Pages Yaml Editor wrapper', () => {
it('emits touch if content is changed in editor', async () => {
await wrapper.vm.editor.setValue('foo: boo');
- expect(wrapper.emitted('touch')).toBeTruthy();
+ expect(wrapper.emitted('touch')).toEqual([expect.any(Array)]);
});
});
});
diff --git a/spec/frontend/pipeline_wizard/components/step_spec.js b/spec/frontend/pipeline_wizard/components/step_spec.js
index aa87b1d0b04..00b57f95ccc 100644
--- a/spec/frontend/pipeline_wizard/components/step_spec.js
+++ b/spec/frontend/pipeline_wizard/components/step_spec.js
@@ -139,7 +139,7 @@ describe('Pipeline Wizard - Step Page', () => {
await mockPrevClick();
await nextTick();
- expect(wrapper.emitted().back).toBeTruthy();
+ expect(wrapper.emitted().back).toEqual(expect.arrayContaining([]));
});
it('lets "next" event bubble upwards', async () => {
@@ -148,7 +148,7 @@ describe('Pipeline Wizard - Step Page', () => {
await mockNextClick();
await nextTick();
- expect(wrapper.emitted().next).toBeTruthy();
+ expect(wrapper.emitted().next).toEqual(expect.arrayContaining([]));
});
});
diff --git a/spec/frontend/pipeline_wizard/components/widgets/checklist_spec.js b/spec/frontend/pipeline_wizard/components/widgets/checklist_spec.js
index 43719595c5c..b8e194015b0 100644
--- a/spec/frontend/pipeline_wizard/components/widgets/checklist_spec.js
+++ b/spec/frontend/pipeline_wizard/components/widgets/checklist_spec.js
@@ -1,4 +1,4 @@
-import { GlFormCheckbox, GlFormCheckboxGroup } from '@gitlab/ui';
+import { GlFormCheckbox, GlFormGroup, GlFormCheckboxGroup } from '@gitlab/ui';
import { nextTick } from 'vue';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import ChecklistWidget from '~/pipeline_wizard/components/widgets/checklist.vue';
@@ -21,6 +21,7 @@ describe('Pipeline Wizard - Checklist Widget', () => {
return eventArray[eventArray.length - 1];
};
const findItem = (atIndex = 0) => wrapper.findAllComponents(GlFormCheckbox).at(atIndex);
+ const getGlFormGroup = () => wrapper.getComponent(GlFormGroup);
const getGlFormCheckboxGroup = () => wrapper.getComponent(GlFormCheckboxGroup);
// The item.ids *can* be passed inside props.items, but are usually
@@ -57,6 +58,16 @@ describe('Pipeline Wizard - Checklist Widget', () => {
expect(findItem().text()).toBe(props.items[0]);
});
+ it('assigns the same non-null value to label-for and form id', () => {
+ createComponent();
+ const formGroupLabelFor = getGlFormGroup().attributes('label-for');
+ const formCheckboxGroupId = getGlFormCheckboxGroup().attributes('id');
+
+ expect(formGroupLabelFor).not.toBeNull();
+ expect(formCheckboxGroupId).not.toBeNull();
+ expect(formGroupLabelFor).toBe(formCheckboxGroupId);
+ });
+
it('displays an item with a help text', () => {
createComponent();
const { text, help } = props.items[1];
diff --git a/spec/frontend/pipelines/components/pipeline_tabs_spec.js b/spec/frontend/pipelines/components/pipeline_tabs_spec.js
index e0210307823..3680d9d62c7 100644
--- a/spec/frontend/pipelines/components/pipeline_tabs_spec.js
+++ b/spec/frontend/pipelines/components/pipeline_tabs_spec.js
@@ -24,12 +24,14 @@ describe('The Pipeline Tabs', () => {
const findFailedJobsBadge = () => wrapper.findByTestId('failed-builds-counter');
const findJobsBadge = () => wrapper.findByTestId('builds-counter');
+ const findTestsBadge = () => wrapper.findByTestId('tests-counter');
const defaultProvide = {
defaultTabValue: '',
failedJobsCount: 1,
failedJobsSummary: [],
totalJobCount: 10,
+ testsCount: 123,
};
const createComponent = (provide = {}) => {
@@ -41,7 +43,6 @@ describe('The Pipeline Tabs', () => {
},
stubs: {
GlTab,
- TestReports: { template: '<div id="tests" />' },
},
}),
);
@@ -82,6 +83,7 @@ describe('The Pipeline Tabs', () => {
tabName | badgeComponent | badgeText
${'Jobs'} | ${findJobsBadge} | ${String(defaultProvide.totalJobCount)}
${'Failed Jobs'} | ${findFailedJobsBadge} | ${String(defaultProvide.failedJobsCount)}
+ ${'Tests'} | ${findTestsBadge} | ${String(defaultProvide.testsCount)}
`('shows badge for $tabName with the correct text', ({ badgeComponent, badgeText }) => {
createComponent();
diff --git a/spec/frontend/pipelines/components/pipelines_filtered_search_spec.js b/spec/frontend/pipelines/components/pipelines_filtered_search_spec.js
index 6c743f92116..f958f12acd4 100644
--- a/spec/frontend/pipelines/components/pipelines_filtered_search_spec.js
+++ b/spec/frontend/pipelines/components/pipelines_filtered_search_spec.js
@@ -102,7 +102,7 @@ describe('Pipelines filtered search', () => {
it('emits filterPipelines on submit with correct filter', () => {
findFilteredSearch().vm.$emit('submit', mockSearch);
- expect(wrapper.emitted('filterPipelines')).toBeTruthy();
+ expect(wrapper.emitted('filterPipelines')).toHaveLength(1);
expect(wrapper.emitted('filterPipelines')[0]).toEqual([mockSearch]);
});
diff --git a/spec/frontend/pipelines/components/pipelines_list/pipeline_stage_spec.js b/spec/frontend/pipelines/components/pipelines_list/pipeline_stage_spec.js
index 1ff32b03344..e712cdeaea2 100644
--- a/spec/frontend/pipelines/components/pipelines_list/pipeline_stage_spec.js
+++ b/spec/frontend/pipelines/components/pipelines_list/pipeline_stage_spec.js
@@ -1,4 +1,5 @@
import { GlDropdown } from '@gitlab/ui';
+import { nextTick } from 'vue';
import { mount } from '@vue/test-utils';
import MockAdapter from 'axios-mock-adapter';
import CiIcon from '~/vue_shared/components/ci_icon.vue';
@@ -61,11 +62,10 @@ describe('Pipelines stage component', () => {
const findMergeTrainWarning = () => wrapper.find('[data-testid="warning-message-merge-trains"]');
const findLoadingState = () => wrapper.find('[data-testid="pipeline-stage-loading-state"]');
- const openStageDropdown = () => {
- findDropdownToggle().trigger('click');
- return new Promise((resolve) => {
- wrapper.vm.$root.$on('bv::dropdown::show', resolve);
- });
+ const openStageDropdown = async () => {
+ await findDropdownToggle().trigger('click');
+ await waitForPromises();
+ await nextTick();
};
describe('loading state', () => {
@@ -77,7 +77,10 @@ describe('Pipelines stage component', () => {
await openStageDropdown();
});
- it('displays loading state while jobs are being fetched', () => {
+ it('displays loading state while jobs are being fetched', async () => {
+ jest.runOnlyPendingTimers();
+ await nextTick();
+
expect(findLoadingState().exists()).toBe(true);
expect(findLoadingState().text()).toBe(PipelineStage.i18n.loadingText);
});
@@ -98,46 +101,41 @@ describe('Pipelines stage component', () => {
expect(glTooltipDirectiveMock.mock.calls[0][1].modifiers.ds0).toBe(true);
});
- it('should render a dropdown with the status icon', () => {
+ it('renders a dropdown with the status icon', () => {
expect(findDropdown().exists()).toBe(true);
expect(findDropdownToggle().exists()).toBe(true);
expect(findCiIcon().exists()).toBe(true);
});
- it('should render a borderless ci-icon', () => {
+ it('renders a borderless ci-icon', () => {
expect(findCiIcon().exists()).toBe(true);
expect(findCiIcon().props('isBorderless')).toBe(true);
expect(findCiIcon().classes('borderless')).toBe(true);
});
- it('should render a ci-icon with a custom border class', () => {
+ it('renders a ci-icon with a custom border class', () => {
expect(findCiIcon().exists()).toBe(true);
expect(findCiIcon().classes('gl-border')).toBe(true);
});
});
- describe('when update dropdown is changed', () => {
- beforeEach(() => {
- createComponent();
- });
- });
-
describe('when user opens dropdown and stage request is successful', () => {
beforeEach(async () => {
mock.onGet(dropdownPath).reply(200, stageReply);
createComponent();
await openStageDropdown();
+ await jest.runAllTimers();
await axios.waitForAll();
});
- it('should render the received data and emit `clickedDropdown` event', async () => {
+ it('renders the received data and emits `clickedDropdown` event', async () => {
expect(findDropdownMenu().text()).toContain(stageReply.latest_statuses[0].name);
expect(findDropdownMenuTitle().text()).toContain(stageReply.name);
expect(eventHub.$emit).toHaveBeenCalledWith('clickedDropdown');
});
- it('should refresh when updateDropdown is set to true', async () => {
+ it('refreshes when updateDropdown is set to true', async () => {
expect(mock.history.get).toHaveLength(1);
wrapper.setProps({ updateDropdown: true });
@@ -148,15 +146,14 @@ describe('Pipelines stage component', () => {
});
describe('when user opens dropdown and stage request fails', () => {
- beforeEach(async () => {
+ it('should close the dropdown', async () => {
mock.onGet(dropdownPath).reply(500);
createComponent();
await openStageDropdown();
await axios.waitForAll();
- });
+ await waitForPromises();
- it('should close the dropdown', () => {
expect(findDropdown().classes('show')).toBe(false);
});
});
@@ -181,26 +178,29 @@ describe('Pipelines stage component', () => {
it('should update the stage to request the new endpoint provided', async () => {
await openStageDropdown();
- await axios.waitForAll();
+ jest.runOnlyPendingTimers();
+ await waitForPromises();
expect(findDropdownMenu().text()).toContain('this is the updated content');
});
});
describe('pipelineActionRequestComplete', () => {
- beforeEach(() => {
+ beforeEach(async () => {
mock.onGet(dropdownPath).reply(200, stageReply);
mock.onPost(`${stageReply.latest_statuses[0].status.action.path}.json`).reply(200);
createComponent();
+ await waitForPromises();
+ await nextTick();
});
const clickCiAction = async () => {
await openStageDropdown();
- await axios.waitForAll();
+ jest.runOnlyPendingTimers();
+ await waitForPromises();
- findCiActionBtn().trigger('click');
- await axios.waitForAll();
+ await findCiActionBtn().trigger('click');
};
it('closes dropdown when job item action is clicked', async () => {
@@ -211,29 +211,30 @@ describe('Pipelines stage component', () => {
expect(hidden).toHaveBeenCalledTimes(0);
await clickCiAction();
+ await waitForPromises();
expect(hidden).toHaveBeenCalledTimes(1);
});
it('emits `pipelineActionRequestComplete` when job item action is clicked', async () => {
await clickCiAction();
+ await waitForPromises();
expect(wrapper.emitted('pipelineActionRequestComplete')).toHaveLength(1);
});
});
describe('With merge trains enabled', () => {
- beforeEach(async () => {
+ it('shows a warning on the dropdown', async () => {
mock.onGet(dropdownPath).reply(200, stageReply);
createComponent({
isMergeTrain: true,
});
await openStageDropdown();
- await axios.waitForAll();
- });
+ jest.runOnlyPendingTimers();
+ await waitForPromises();
- it('shows a warning on the dropdown', () => {
const warning = findMergeTrainWarning();
expect(warning.text()).toBe('Merge train pipeline jobs can not be retried');
diff --git a/spec/frontend/pipelines/graph/linked_pipeline_spec.js b/spec/frontend/pipelines/graph/linked_pipeline_spec.js
index cdeaa0db61d..7d1e4774a24 100644
--- a/spec/frontend/pipelines/graph/linked_pipeline_spec.js
+++ b/spec/frontend/pipelines/graph/linked_pipeline_spec.js
@@ -426,7 +426,7 @@ describe('Linked pipeline', () => {
jest.spyOn(wrapper.vm, '$emit');
findButton().trigger('click');
- expect(wrapper.emitted().pipelineClicked).toBeTruthy();
+ expect(wrapper.emitted().pipelineClicked).toHaveLength(1);
});
it(`should emit ${BV_HIDE_TOOLTIP} to close the tooltip`, () => {
diff --git a/spec/frontend/pipelines/performance_insights_modal_spec.js b/spec/frontend/pipelines/performance_insights_modal_spec.js
index b745eb1d78e..8c802be7718 100644
--- a/spec/frontend/pipelines/performance_insights_modal_spec.js
+++ b/spec/frontend/pipelines/performance_insights_modal_spec.js
@@ -20,6 +20,7 @@ describe('Performance insights modal', () => {
const findModal = () => wrapper.findComponent(GlModal);
const findAlert = () => wrapper.findComponent(GlAlert);
const findLink = () => wrapper.findComponent(GlLink);
+ const findLimitText = () => wrapper.findByTestId('limit-alert-text');
const findQueuedCardData = () => wrapper.findByTestId('insights-queued-card-data');
const findQueuedCardLink = () => wrapper.findByTestId('insights-queued-card-link');
const findExecutedCardData = () => wrapper.findByTestId('insights-executed-card-data');
@@ -62,8 +63,19 @@ describe('Performance insights modal', () => {
expect(findModal().exists()).toBe(true);
});
- it('does not dispaly alert', () => {
- expect(findAlert().exists()).toBe(false);
+ it('displays an alert', () => {
+ expect(findAlert().exists()).toBe(true);
+ });
+
+ it('displays feedback issue link', () => {
+ expect(findLink().text()).toBe('Feedback issue');
+ expect(findLink().attributes('href')).toBe(
+ 'https://gitlab.com/gitlab-org/gitlab/-/issues/365902',
+ );
+ });
+
+ it('does not display limit text', () => {
+ expect(findLimitText().exists()).toBe(false);
});
describe('queued duration card', () => {
@@ -107,16 +119,13 @@ describe('Performance insights modal', () => {
});
});
- describe('limit alert', () => {
- it('displays limit alert when there is a next page', async () => {
+ describe('with next page', () => {
+ it('displays limit text when there is a next page', async () => {
createComponent([[getPerformanceInsights, getPerformanceInsightsNextPageHandler]]);
await waitForPromises();
- expect(findAlert().exists()).toBe(true);
- expect(findLink().attributes('href')).toBe(
- 'https://gitlab.com/gitlab-org/gitlab/-/issues/365902',
- );
+ expect(findLimitText().exists()).toBe(true);
});
});
});
diff --git a/spec/frontend/pipelines/pipeline_multi_actions_spec.js b/spec/frontend/pipelines/pipeline_multi_actions_spec.js
index e24d2e51f08..f554166da33 100644
--- a/spec/frontend/pipelines/pipeline_multi_actions_spec.js
+++ b/spec/frontend/pipelines/pipeline_multi_actions_spec.js
@@ -84,13 +84,22 @@ describe('Pipeline Multi Actions Dropdown', () => {
expect(wrapper.vm.artifacts).toEqual(artifacts);
});
- it('should render all the provided artifacts', () => {
- createComponent({ mockData: { artifacts } });
+ it('should render all the provided artifacts when search query is empty', () => {
+ const searchQuery = '';
+ createComponent({ mockData: { searchQuery, artifacts } });
expect(findAllArtifactItems()).toHaveLength(artifacts.length);
expect(findEmptyMessage().exists()).toBe(false);
});
+ it('should render filtered artifacts when search query is not empty', () => {
+ const searchQuery = 'job-2';
+ createComponent({ mockData: { searchQuery, artifacts } });
+
+ expect(findAllArtifactItems()).toHaveLength(1);
+ expect(findEmptyMessage().exists()).toBe(false);
+ });
+
it('should render the correct artifact name and path', () => {
createComponent({ mockData: { artifacts } });
diff --git a/spec/frontend/pipelines/pipeline_url_spec.js b/spec/frontend/pipelines/pipeline_url_spec.js
index c6104a13216..25a97ecf49d 100644
--- a/spec/frontend/pipelines/pipeline_url_spec.js
+++ b/spec/frontend/pipelines/pipeline_url_spec.js
@@ -61,14 +61,14 @@ describe('Pipeline Url Component', () => {
describe('commit user avatar', () => {
it('renders when commit author exists', () => {
const pipelineBranch = mockPipelineBranch();
- const { avatar_url, name, path } = pipelineBranch.pipeline.commit.author;
+ const { avatar_url: imgSrc, name, path } = pipelineBranch.pipeline.commit.author;
createComponent(pipelineBranch);
const component = wrapper.findComponent(UserAvatarLink);
expect(component.exists()).toBe(true);
expect(component.props()).toMatchObject({
imgSize: 16,
- imgSrc: avatar_url,
+ imgSrc,
imgAlt: name,
linkHref: path,
tooltipText: name,
diff --git a/spec/frontend/pipelines/pipelines_spec.js b/spec/frontend/pipelines/pipelines_spec.js
index ad6d650670a..0bed24e588e 100644
--- a/spec/frontend/pipelines/pipelines_spec.js
+++ b/spec/frontend/pipelines/pipelines_spec.js
@@ -45,6 +45,7 @@ describe('Pipelines', () => {
ciLintPath: '/ci/lint',
resetCachePath: `${mockProjectPath}/settings/ci_cd/reset_cache`,
newPipelinePath: `${mockProjectPath}/pipelines/new`,
+
ciRunnerSettingsPath: `${mockProjectPath}/-/settings/ci_cd#js-runners-settings`,
};
@@ -654,7 +655,12 @@ describe('Pipelines', () => {
// Mock init a polling cycle
wrapper.vm.poll.options.notificationCallback(true);
- findStagesDropdownToggle().trigger('click');
+ await findStagesDropdownToggle().trigger('click');
+ jest.runOnlyPendingTimers();
+
+ // cancelMock is getting overwritten in pipelines_service.js#L29
+ // so we have to spy on it again here
+ cancelMock = jest.spyOn(wrapper.vm.service.cancelationSource, 'cancel');
await waitForPromises();
@@ -664,7 +670,8 @@ describe('Pipelines', () => {
});
it('stops polling & restarts polling', async () => {
- findStagesDropdownToggle().trigger('click');
+ await findStagesDropdownToggle().trigger('click');
+ jest.runOnlyPendingTimers();
await waitForPromises();
expect(cancelMock).not.toHaveBeenCalled();
diff --git a/spec/frontend/pipelines/test_reports/test_reports_spec.js b/spec/frontend/pipelines/test_reports/test_reports_spec.js
index 3c3143b1865..9b9ee4172f9 100644
--- a/spec/frontend/pipelines/test_reports/test_reports_spec.js
+++ b/spec/frontend/pipelines/test_reports/test_reports_spec.js
@@ -94,8 +94,8 @@ describe('Test reports app', () => {
beforeEach(() => createComponent());
it('sets testReports and shows tests', () => {
- expect(wrapper.vm.testReports).toBeTruthy();
- expect(wrapper.vm.showTests).toBeTruthy();
+ expect(wrapper.vm.testReports).toEqual(expect.any(Object));
+ expect(wrapper.vm.showTests).toBe(true);
});
it('shows tests details', () => {
diff --git a/spec/frontend/projects/commits/components/author_select_spec.js b/spec/frontend/projects/commits/components/author_select_spec.js
index d11090cba8a..57e5ef0ed1d 100644
--- a/spec/frontend/projects/commits/components/author_select_spec.js
+++ b/spec/frontend/projects/commits/components/author_select_spec.js
@@ -71,7 +71,7 @@ describe('Author Select', () => {
wrapper.setData({ hasSearchParam: true });
await nextTick();
- expect(findDropdownContainer().attributes('disabled')).toBeFalsy();
+ expect(findDropdownContainer().attributes('disabled')).toBeUndefined();
});
it('has correct tooltip message', async () => {
@@ -91,13 +91,13 @@ describe('Author Select', () => {
wrapper.setData({ hasSearchParam: false });
await nextTick();
- expect(findDropdown().attributes('disabled')).toBeFalsy();
+ expect(findDropdown().attributes('disabled')).toBeUndefined();
});
it('hasSearchParam if user types a truthy string', () => {
wrapper.vm.setSearchParam('false');
- expect(wrapper.vm.hasSearchParam).toBeTruthy();
+ expect(wrapper.vm.hasSearchParam).toBe(true);
});
});
@@ -153,9 +153,9 @@ describe('Author Select', () => {
});
it('has the correct props', async () => {
- const [{ avatar_url, username }] = authors;
+ const [{ avatar_url: avatarUrl, username }] = authors;
const result = {
- avatarUrl: avatar_url,
+ avatarUrl,
secondaryText: username,
isChecked: true,
};
diff --git a/spec/frontend/projects/compare/components/app_spec.js b/spec/frontend/projects/compare/components/app_spec.js
index 18e7f2e0f6e..c9ffdf20c32 100644
--- a/spec/frontend/projects/compare/components/app_spec.js
+++ b/spec/frontend/projects/compare/components/app_spec.js
@@ -34,7 +34,8 @@ describe('CompareApp component', () => {
expect(wrapper.props()).toEqual(
expect.objectContaining({
projectCompareIndexPath: defaultProps.projectCompareIndexPath,
- refsProjectPath: defaultProps.refsProjectPath,
+ sourceProjectRefsPath: defaultProps.sourceProjectRefsPath,
+ targetProjectRefsPath: defaultProps.targetProjectRefsPath,
paramsFrom: defaultProps.paramsFrom,
paramsTo: defaultProps.paramsTo,
}),
diff --git a/spec/frontend/projects/compare/components/mock_data.js b/spec/frontend/projects/compare/components/mock_data.js
index 61309928c26..81d64469a2a 100644
--- a/spec/frontend/projects/compare/components/mock_data.js
+++ b/spec/frontend/projects/compare/components/mock_data.js
@@ -1,7 +1,12 @@
-const refsProjectPath = 'some/refs/path';
+const sourceProjectRefsPath = 'some/refs/path';
+const targetProjectRefsPath = 'some/refs/path';
const paramsName = 'to';
const paramsBranch = 'main';
-const defaultProject = {
+const sourceProject = {
+ name: 'some-to-name',
+ id: '2',
+};
+const targetProject = {
name: 'some-to-name',
id: '1',
};
@@ -9,29 +14,31 @@ const defaultProject = {
export const appDefaultProps = {
projectCompareIndexPath: 'some/path',
projectMergeRequestPath: '',
- projects: [defaultProject],
+ projects: [sourceProject],
paramsFrom: 'main',
paramsTo: 'target/branch',
createMrPath: '',
- refsProjectPath,
- defaultProject,
+ sourceProjectRefsPath,
+ targetProjectRefsPath,
+ sourceProject,
+ targetProject,
};
export const revisionCardDefaultProps = {
- selectedProject: defaultProject,
+ selectedProject: targetProject,
paramsBranch,
revisionText: 'Source',
- refsProjectPath,
+ refsProjectPath: sourceProjectRefsPath,
paramsName,
};
export const repoDropdownDefaultProps = {
- selectedProject: defaultProject,
+ selectedProject: targetProject,
paramsName,
};
export const revisionDropdownDefaultProps = {
- refsProjectPath,
+ refsProjectPath: sourceProjectRefsPath,
paramsBranch,
paramsName,
};
diff --git a/spec/frontend/projects/project_new_spec.js b/spec/frontend/projects/project_new_spec.js
index 3034037fb1d..4fcecc3a307 100644
--- a/spec/frontend/projects/project_new_spec.js
+++ b/spec/frontend/projects/project_new_spec.js
@@ -1,6 +1,7 @@
import { setHTMLFixture, resetHTMLFixture } from 'helpers/fixtures';
import { TEST_HOST } from 'helpers/test_constants';
import projectNew from '~/projects/project_new';
+import { mockTracking, triggerEvent, unmockTracking } from 'helpers/tracking_helper';
describe('New Project', () => {
let $projectImportUrl;
@@ -12,21 +13,27 @@ describe('New Project', () => {
beforeEach(() => {
setHTMLFixture(`
- <div class='toggle-import-form'>
- <div class='import-url-data'>
- <div class="form-group">
- <input id="project_import_url" />
- </div>
- <div id="import-url-auth-method">
- <div class="form-group">
- <input id="project-import-url-user" />
+ <div class="tab-pane active">
+ <div class='toggle-import-form'>
+ <form id="new_project">
+ <div class='import-url-data'>
+ <div class="form-group">
+ <input id="project_import_url" />
+ </div>
+ <div id="import-url-auth-method">
+ <div class="form-group">
+ <input id="project-import-url-user" />
+ </div>
+ <div class="form-group">
+ <input id="project_import_url_password" />
+ </div>
+ </div>
+ <input id="project_name" />
+ <input id="project_path" />
</div>
- <div class="form-group">
- <input id="project_import_url_password" />
- </div>
- </div>
- <input id="project_name" />
- <input id="project_path" />
+ <div class="js-user-readme-repo"></div>
+ <button class="js-create-project-button"/>
+ </form>
</div>
</div>
`);
@@ -45,6 +52,38 @@ describe('New Project', () => {
el.value = value;
};
+ describe('tracks manual path input', () => {
+ let trackingSpy;
+
+ beforeEach(() => {
+ trackingSpy = mockTracking('_category_', undefined, jest.spyOn);
+ projectNew.bindEvents();
+ $projectPath.oldInputValue = '_old_value_';
+ });
+
+ afterEach(() => {
+ unmockTracking();
+ });
+
+ it('tracks the event', () => {
+ $projectPath.value = '_new_value_';
+
+ triggerEvent($projectPath, 'blur');
+
+ expect(trackingSpy).toHaveBeenCalledWith(undefined, 'user_input_path_slug', {
+ label: 'new_project_form',
+ });
+ });
+
+ it('does not track the event when there has been no change', () => {
+ $projectPath.value = '_old_value_';
+
+ triggerEvent($projectPath, 'blur');
+
+ expect(trackingSpy).not.toHaveBeenCalled();
+ });
+ });
+
describe('deriveProjectPathFromUrl', () => {
const dummyImportUrl = `${TEST_HOST}/dummy/import/url.git`;
diff --git a/spec/frontend/projects/settings/branch_rules/branch_dropdown_spec.js b/spec/frontend/projects/settings/branch_rules/branch_dropdown_spec.js
index 5997c2a083c..79bce5a4b3f 100644
--- a/spec/frontend/projects/settings/branch_rules/branch_dropdown_spec.js
+++ b/spec/frontend/projects/settings/branch_rules/branch_dropdown_spec.js
@@ -1,6 +1,6 @@
import Vue, { nextTick } from 'vue';
import VueApollo from 'vue-apollo';
-import { GlDropdown, GlSearchBoxByType, GlDropdownItem } from '@gitlab/ui';
+import { GlDropdown, GlSearchBoxByType, GlDropdownItem, GlSprintf } from '@gitlab/ui';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import BranchDropdown, {
i18n,
@@ -36,15 +36,20 @@ describe('Branch dropdown', () => {
await waitForPromises();
};
- const findGlDropdown = () => wrapper.find(GlDropdown);
- const findAllBranches = () => wrapper.findAll(GlDropdownItem);
+ const findGlDropdown = () => wrapper.findComponent(GlDropdown);
+ const findAllBranches = () => wrapper.findAllComponents(GlDropdownItem);
const findNoDataMsg = () => wrapper.findByTestId('no-data');
- const findGlSearchBoxByType = () => wrapper.find(GlSearchBoxByType);
+ const findGlSearchBoxByType = () => wrapper.findComponent(GlSearchBoxByType);
const findWildcardButton = () => wrapper.findByTestId('create-wildcard-button');
+ const findHelpText = () => wrapper.findComponent(GlSprintf);
const setSearchTerm = (searchTerm) => findGlSearchBoxByType().vm.$emit('input', searchTerm);
beforeEach(() => createComponent());
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
it('renders a GlDropdown component with the correct props', () => {
expect(findGlDropdown().props()).toMatchObject({ text: value });
});
@@ -85,6 +90,10 @@ describe('Branch dropdown', () => {
findWildcardButton().vm.$emit('click');
expect(wrapper.emitted('createWildcard')).toEqual([[searchTerm]]);
});
+
+ it('renders help text', () => {
+ expect(findHelpText().attributes('message')).toBe(i18n.branchHelpText);
+ });
});
it('displays an error message if fetch failed', async () => {
diff --git a/spec/frontend/projects/settings/branch_rules/components/protections/index_spec.js b/spec/frontend/projects/settings/branch_rules/components/protections/index_spec.js
new file mode 100644
index 00000000000..3592fa50622
--- /dev/null
+++ b/spec/frontend/projects/settings/branch_rules/components/protections/index_spec.js
@@ -0,0 +1,57 @@
+import { nextTick } from 'vue';
+import { GlLink } from '@gitlab/ui';
+import { mountExtended } from 'helpers/vue_test_utils_helper';
+import Protections, {
+ i18n,
+} from '~/projects/settings/branch_rules/components/protections/index.vue';
+import PushProtections from '~/projects/settings/branch_rules/components/protections/push_protections.vue';
+import MergeProtections from '~/projects/settings/branch_rules/components/protections/merge_protections.vue';
+import { protections } from '../../mock_data';
+
+describe('Branch Protections', () => {
+ let wrapper;
+
+ const createComponent = async () => {
+ wrapper = mountExtended(Protections, {
+ propsData: { protections },
+ });
+ await nextTick();
+ };
+
+ const findHeading = () => wrapper.find('h4');
+ const findHelpText = () => wrapper.findByTestId('protections-help-text');
+ const findHelpLink = () => wrapper.findComponent(GlLink);
+ const findPushProtections = () => wrapper.findComponent(PushProtections);
+ const findMergeProtections = () => wrapper.findComponent(MergeProtections);
+
+ beforeEach(() => createComponent());
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('renders a heading', () => {
+ expect(findHeading().text()).toBe(i18n.protections);
+ });
+
+ it('renders help text', () => {
+ expect(findHelpText().text()).toMatchInterpolatedText(i18n.protectionsHelpText);
+ expect(findHelpLink().attributes('href')).toBe('/help/user/project/protected_branches');
+ });
+
+ it('renders a PushProtections component with correct props', () => {
+ expect(findPushProtections().props('membersAllowedToPush')).toStrictEqual(
+ protections.membersAllowedToPush,
+ );
+ expect(findPushProtections().props('allowForcePush')).toBe(protections.allowForcePush);
+ });
+
+ it('renders a MergeProtections component with correct props', () => {
+ expect(findMergeProtections().props('membersAllowedToMerge')).toStrictEqual(
+ protections.membersAllowedToMerge,
+ );
+ expect(findMergeProtections().props('requireCodeOwnersApproval')).toBe(
+ protections.requireCodeOwnersApproval,
+ );
+ });
+});
diff --git a/spec/frontend/projects/settings/branch_rules/components/protections/merge_protections_spec.js b/spec/frontend/projects/settings/branch_rules/components/protections/merge_protections_spec.js
new file mode 100644
index 00000000000..0e168a2ad78
--- /dev/null
+++ b/spec/frontend/projects/settings/branch_rules/components/protections/merge_protections_spec.js
@@ -0,0 +1,53 @@
+import { GlFormGroup, GlFormCheckbox } from '@gitlab/ui';
+import { mountExtended } from 'helpers/vue_test_utils_helper';
+import MergeProtections, {
+ i18n,
+} from '~/projects/settings/branch_rules/components/protections/merge_protections.vue';
+import { membersAllowedToMerge, requireCodeOwnersApproval } from '../../mock_data';
+
+describe('Merge Protections', () => {
+ let wrapper;
+
+ const propsData = {
+ membersAllowedToMerge,
+ requireCodeOwnersApproval,
+ };
+
+ const createComponent = () => {
+ wrapper = mountExtended(MergeProtections, {
+ propsData,
+ });
+ };
+
+ const findFormGroup = () => wrapper.findComponent(GlFormGroup);
+ const findCodeOwnersApprovalCheckbox = () => wrapper.findComponent(GlFormCheckbox);
+
+ beforeEach(() => createComponent());
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('renders a form group with the correct label', () => {
+ expect(findFormGroup().text()).toContain(i18n.allowedToMerge);
+ });
+
+ describe('Require code owners approval checkbox', () => {
+ it('renders a checkbox with the correct props', () => {
+ expect(findCodeOwnersApprovalCheckbox().vm.$attrs.checked).toBe(
+ propsData.requireCodeOwnersApproval,
+ );
+ });
+
+ it('renders help text', () => {
+ expect(findCodeOwnersApprovalCheckbox().text()).toContain(i18n.requireApprovalTitle);
+ expect(findCodeOwnersApprovalCheckbox().text()).toContain(i18n.requireApprovalHelpText);
+ });
+
+ it('emits a change-require-code-owners-approval event when changed', () => {
+ findCodeOwnersApprovalCheckbox().vm.$emit('change', false);
+
+ expect(wrapper.emitted('change-require-code-owners-approval')[0]).toEqual([false]);
+ });
+ });
+});
diff --git a/spec/frontend/projects/settings/branch_rules/components/protections/push_protections_spec.js b/spec/frontend/projects/settings/branch_rules/components/protections/push_protections_spec.js
new file mode 100644
index 00000000000..d54dad08338
--- /dev/null
+++ b/spec/frontend/projects/settings/branch_rules/components/protections/push_protections_spec.js
@@ -0,0 +1,50 @@
+import { GlFormGroup, GlSprintf, GlFormCheckbox } from '@gitlab/ui';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import PushProtections, {
+ i18n,
+} from '~/projects/settings/branch_rules/components/protections/push_protections.vue';
+import { membersAllowedToPush, allowForcePush } from '../../mock_data';
+
+describe('Push Protections', () => {
+ let wrapper;
+ const propsData = {
+ membersAllowedToPush,
+ allowForcePush,
+ };
+
+ const createComponent = () => {
+ wrapper = shallowMountExtended(PushProtections, {
+ propsData,
+ });
+ };
+
+ const findFormGroup = () => wrapper.findComponent(GlFormGroup);
+ const findAllowForcePushCheckbox = () => wrapper.findComponent(GlFormCheckbox);
+ const findHelpText = () => wrapper.findComponent(GlSprintf);
+
+ beforeEach(() => createComponent());
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('renders a form group with the correct label', () => {
+ expect(findFormGroup().attributes('label')).toBe(i18n.allowedToPush);
+ });
+
+ describe('Allow force push checkbox', () => {
+ it('renders a checkbox with the correct props', () => {
+ expect(findAllowForcePushCheckbox().vm.$attrs.checked).toBe(propsData.allowForcePush);
+ });
+
+ it('renders help text', () => {
+ expect(findHelpText().attributes('message')).toBe(i18n.forcePushTitle);
+ });
+
+ it('emits a change-allow-force-push event when changed', () => {
+ findAllowForcePushCheckbox().vm.$emit('change', false);
+
+ expect(wrapper.emitted('change-allow-force-push')[0]).toEqual([false]);
+ });
+ });
+});
diff --git a/spec/frontend/projects/settings/branch_rules/mock_data.js b/spec/frontend/projects/settings/branch_rules/mock_data.js
new file mode 100644
index 00000000000..32cca027d19
--- /dev/null
+++ b/spec/frontend/projects/settings/branch_rules/mock_data.js
@@ -0,0 +1,10 @@
+export const membersAllowedToPush = ['Maintainers', 'Developers'];
+export const allowForcePush = false;
+export const membersAllowedToMerge = ['Maintainers'];
+export const requireCodeOwnersApproval = false;
+export const protections = {
+ membersAllowedToPush,
+ allowForcePush,
+ membersAllowedToMerge,
+ requireCodeOwnersApproval,
+};
diff --git a/spec/frontend/projects/settings/branch_rules/rule_edit_spec.js b/spec/frontend/projects/settings/branch_rules/rule_edit_spec.js
index 66ae6ddc02d..b0b2b9191d4 100644
--- a/spec/frontend/projects/settings/branch_rules/rule_edit_spec.js
+++ b/spec/frontend/projects/settings/branch_rules/rule_edit_spec.js
@@ -3,9 +3,12 @@ import { getParameterByName } from '~/lib/utils/url_utility';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import RuleEdit from '~/projects/settings/branch_rules/components/rule_edit.vue';
import BranchDropdown from '~/projects/settings/branch_rules/components/branch_dropdown.vue';
+import Protections from '~/projects/settings/branch_rules/components/protections/index.vue';
jest.mock('~/lib/utils/url_utility', () => ({
getParameterByName: jest.fn().mockImplementation(() => 'main'),
+ joinPaths: jest.fn(),
+ setUrlFragment: jest.fn(),
}));
describe('Edit branch rule', () => {
@@ -16,10 +19,15 @@ describe('Edit branch rule', () => {
wrapper = shallowMountExtended(RuleEdit, { propsData: { projectPath } });
};
- const findBranchDropdown = () => wrapper.find(BranchDropdown);
+ const findBranchDropdown = () => wrapper.findComponent(BranchDropdown);
+ const findProtections = () => wrapper.findComponent(Protections);
beforeEach(() => createComponent());
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
it('gets the branch param from url', () => {
expect(getParameterByName).toHaveBeenCalledWith('branch');
});
@@ -46,4 +54,55 @@ describe('Edit branch rule', () => {
expect(findBranchDropdown().props('value')).toBe(wildcard);
});
});
+
+ describe('Protections', () => {
+ it('renders a Protections component with the correct props', () => {
+ expect(findProtections().props('protections')).toMatchObject({
+ membersAllowedToPush: [],
+ allowForcePush: false,
+ membersAllowedToMerge: [],
+ requireCodeOwnersApproval: false,
+ });
+ });
+
+ it('updates protections when change-allowed-to-push-members is emitted', async () => {
+ const membersAllowedToPush = ['test'];
+ findProtections().vm.$emit('change-allowed-to-push-members', membersAllowedToPush);
+ await nextTick();
+
+ expect(findProtections().props('protections')).toEqual(
+ expect.objectContaining({ membersAllowedToPush }),
+ );
+ });
+
+ it('updates protections when change-allow-force-push is emitted', async () => {
+ const allowForcePush = true;
+ findProtections().vm.$emit('change-allow-force-push', allowForcePush);
+ await nextTick();
+
+ expect(findProtections().props('protections')).toEqual(
+ expect.objectContaining({ allowForcePush }),
+ );
+ });
+
+ it('updates protections when change-allowed-to-merge-members is emitted', async () => {
+ const membersAllowedToMerge = ['test'];
+ findProtections().vm.$emit('change-allowed-to-merge-members', membersAllowedToMerge);
+ await nextTick();
+
+ expect(findProtections().props('protections')).toEqual(
+ expect.objectContaining({ membersAllowedToMerge }),
+ );
+ });
+
+ it('updates protections when change-require-code-owners-approval is emitted', async () => {
+ const requireCodeOwnersApproval = true;
+ findProtections().vm.$emit('change-require-code-owners-approval', requireCodeOwnersApproval);
+ await nextTick();
+
+ expect(findProtections().props('protections')).toEqual(
+ expect.objectContaining({ requireCodeOwnersApproval }),
+ );
+ });
+ });
});
diff --git a/spec/frontend/projects/settings/components/transfer_project_form_spec.js b/spec/frontend/projects/settings/components/transfer_project_form_spec.js
index 85b09ced024..bde7148078d 100644
--- a/spec/frontend/projects/settings/components/transfer_project_form_spec.js
+++ b/spec/frontend/projects/settings/components/transfer_project_form_spec.js
@@ -1,11 +1,19 @@
+import Vue, { nextTick } from 'vue';
+import VueApollo from 'vue-apollo';
+import searchNamespacesWhereUserCanTransferProjectsQueryResponsePage1 from 'test_fixtures/graphql/projects/settings/search_namespaces_where_user_can_transfer_projects_page_1.query.graphql.json';
+import searchNamespacesWhereUserCanTransferProjectsQueryResponsePage2 from 'test_fixtures/graphql/projects/settings/search_namespaces_where_user_can_transfer_projects_page_2.query.graphql.json';
import {
groupNamespaces,
userNamespaces,
} from 'jest/vue_shared/components/namespace_select/mock_data';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import { getIdFromGraphQLId } from '~/graphql_shared/utils';
import TransferProjectForm from '~/projects/settings/components/transfer_project_form.vue';
import NamespaceSelect from '~/vue_shared/components/namespace_select/namespace_select.vue';
import ConfirmDanger from '~/vue_shared/components/confirm_danger/confirm_danger.vue';
+import searchNamespacesWhereUserCanTransferProjectsQuery from '~/projects/settings/graphql/queries/search_namespaces_where_user_can_transfer_projects.query.graphql';
+import waitForPromises from 'helpers/wait_for_promises';
describe('Transfer project form', () => {
let wrapper;
@@ -13,36 +21,50 @@ describe('Transfer project form', () => {
const confirmButtonText = 'Confirm';
const confirmationPhrase = 'You must construct additional pylons!';
- const createComponent = () =>
- shallowMountExtended(TransferProjectForm, {
+ const runDebounce = () => jest.runAllTimers();
+
+ Vue.use(VueApollo);
+
+ const defaultQueryHandler = jest
+ .fn()
+ .mockResolvedValue(searchNamespacesWhereUserCanTransferProjectsQueryResponsePage1);
+
+ const createComponent = ({
+ requestHandlers = [[searchNamespacesWhereUserCanTransferProjectsQuery, defaultQueryHandler]],
+ } = {}) => {
+ wrapper = shallowMountExtended(TransferProjectForm, {
propsData: {
userNamespaces,
groupNamespaces,
confirmButtonText,
confirmationPhrase,
},
+ apolloProvider: createMockApollo(requestHandlers),
});
+ };
const findNamespaceSelect = () => wrapper.findComponent(NamespaceSelect);
const findConfirmDanger = () => wrapper.findComponent(ConfirmDanger);
- beforeEach(() => {
- wrapper = createComponent();
- });
-
afterEach(() => {
wrapper.destroy();
});
it('renders the namespace selector', () => {
+ createComponent();
+
expect(findNamespaceSelect().exists()).toBe(true);
});
it('renders the confirm button', () => {
+ createComponent();
+
expect(findConfirmDanger().exists()).toBe(true);
});
it('disables the confirm button by default', () => {
+ createComponent();
+
expect(findConfirmDanger().attributes('disabled')).toBe('true');
});
@@ -50,6 +72,8 @@ describe('Transfer project form', () => {
const [selectedItem] = groupNamespaces;
beforeEach(() => {
+ createComponent();
+
findNamespaceSelect().vm.$emit('select', selectedItem);
});
@@ -69,4 +93,132 @@ describe('Transfer project form', () => {
expect(wrapper.emitted('confirm')).toBeDefined();
});
});
+
+ it('passes correct props to `NamespaceSelect` component', async () => {
+ createComponent();
+
+ runDebounce();
+ await waitForPromises();
+
+ const {
+ namespace,
+ groups,
+ } = searchNamespacesWhereUserCanTransferProjectsQueryResponsePage1.data.currentUser;
+
+ expect(findNamespaceSelect().props()).toMatchObject({
+ userNamespaces: [
+ {
+ id: getIdFromGraphQLId(namespace.id),
+ humanName: namespace.fullName,
+ },
+ ],
+ groupNamespaces: groups.nodes.map((node) => ({
+ id: getIdFromGraphQLId(node.id),
+ humanName: node.fullName,
+ })),
+ hasNextPageOfGroups: true,
+ isLoadingMoreGroups: false,
+ isSearchLoading: false,
+ shouldFilterNamespaces: false,
+ });
+ });
+
+ describe('when `search` event is fired', () => {
+ const arrange = async () => {
+ createComponent();
+
+ findNamespaceSelect().vm.$emit('search', 'foo');
+
+ await nextTick();
+ };
+
+ it('sets `isSearchLoading` prop to `true`', async () => {
+ await arrange();
+
+ expect(findNamespaceSelect().props('isSearchLoading')).toBe(true);
+ });
+
+ it('passes `search` variable to query', async () => {
+ await arrange();
+
+ runDebounce();
+ await waitForPromises();
+
+ expect(defaultQueryHandler).toHaveBeenCalledWith(expect.objectContaining({ search: 'foo' }));
+ });
+ });
+
+ describe('when `load-more-groups` event is fired', () => {
+ let queryHandler;
+
+ const arrange = async () => {
+ queryHandler = jest.fn();
+ queryHandler.mockResolvedValueOnce(
+ searchNamespacesWhereUserCanTransferProjectsQueryResponsePage1,
+ );
+ queryHandler.mockResolvedValueOnce(
+ searchNamespacesWhereUserCanTransferProjectsQueryResponsePage2,
+ );
+
+ createComponent({
+ requestHandlers: [[searchNamespacesWhereUserCanTransferProjectsQuery, queryHandler]],
+ });
+
+ runDebounce();
+ await waitForPromises();
+
+ findNamespaceSelect().vm.$emit('load-more-groups');
+ await nextTick();
+ };
+
+ it('sets `isLoadingMoreGroups` prop to `true`', async () => {
+ await arrange();
+
+ expect(findNamespaceSelect().props('isLoadingMoreGroups')).toBe(true);
+ });
+
+ it('passes `after` and `first` variables to query', async () => {
+ await arrange();
+
+ runDebounce();
+ await waitForPromises();
+
+ expect(queryHandler).toHaveBeenCalledWith(
+ expect.objectContaining({
+ first: 25,
+ after:
+ searchNamespacesWhereUserCanTransferProjectsQueryResponsePage1.data.currentUser.groups
+ .pageInfo.endCursor,
+ }),
+ );
+ });
+
+ it('updates `groupNamespaces` prop with new groups', async () => {
+ await arrange();
+
+ runDebounce();
+ await waitForPromises();
+
+ expect(findNamespaceSelect().props('groupNamespaces')).toEqual(
+ [
+ ...searchNamespacesWhereUserCanTransferProjectsQueryResponsePage1.data.currentUser.groups
+ .nodes,
+ ...searchNamespacesWhereUserCanTransferProjectsQueryResponsePage2.data.currentUser.groups
+ .nodes,
+ ].map((node) => ({
+ id: getIdFromGraphQLId(node.id),
+ humanName: node.fullName,
+ })),
+ );
+ });
+
+ it('updates `hasNextPageOfGroups` prop', async () => {
+ await arrange();
+
+ runDebounce();
+ await waitForPromises();
+
+ expect(findNamespaceSelect().props('hasNextPageOfGroups')).toBe(false);
+ });
+ });
});
diff --git a/spec/frontend/prometheus_metrics/custom_metrics_spec.js b/spec/frontend/prometheus_metrics/custom_metrics_spec.js
index fc906194059..a079b0b97fd 100644
--- a/spec/frontend/prometheus_metrics/custom_metrics_spec.js
+++ b/spec/frontend/prometheus_metrics/custom_metrics_spec.js
@@ -50,39 +50,33 @@ describe('PrometheusMetrics', () => {
customMetrics.showMonitoringCustomMetricsPanelState(PANEL_STATE.LOADING);
expect(customMetrics.$monitoredCustomMetricsLoading.hasClass('hidden')).toEqual(false);
- expect(customMetrics.$monitoredCustomMetricsEmpty.hasClass('hidden')).toBeTruthy();
- expect(customMetrics.$monitoredCustomMetricsList.hasClass('hidden')).toBeTruthy();
- expect(
- customMetrics.$monitoredCustomMetricsNoIntegrationText.hasClass('hidden'),
- ).toBeTruthy();
-
- expect(customMetrics.$newCustomMetricButton.hasClass('hidden')).toBeTruthy();
- expect(customMetrics.$newCustomMetricText.hasClass('hidden')).toBeTruthy();
+ expect(customMetrics.$monitoredCustomMetricsEmpty.hasClass('hidden')).toBe(true);
+ expect(customMetrics.$monitoredCustomMetricsList.hasClass('hidden')).toBe(true);
+ expect(customMetrics.$monitoredCustomMetricsNoIntegrationText.hasClass('hidden')).toBe(true);
+
+ expect(customMetrics.$newCustomMetricButton.hasClass('hidden')).toBe(true);
+ expect(customMetrics.$newCustomMetricText.hasClass('hidden')).toBe(true);
});
it('should show metrics list when called with `list`', () => {
customMetrics.showMonitoringCustomMetricsPanelState(PANEL_STATE.LIST);
- expect(customMetrics.$monitoredCustomMetricsLoading.hasClass('hidden')).toBeTruthy();
- expect(customMetrics.$monitoredCustomMetricsEmpty.hasClass('hidden')).toBeTruthy();
+ expect(customMetrics.$monitoredCustomMetricsLoading.hasClass('hidden')).toBe(true);
+ expect(customMetrics.$monitoredCustomMetricsEmpty.hasClass('hidden')).toBe(true);
expect(customMetrics.$monitoredCustomMetricsList.hasClass('hidden')).toEqual(false);
- expect(
- customMetrics.$monitoredCustomMetricsNoIntegrationText.hasClass('hidden'),
- ).toBeTruthy();
+ expect(customMetrics.$monitoredCustomMetricsNoIntegrationText.hasClass('hidden')).toBe(true);
expect(customMetrics.$newCustomMetricButton.hasClass('hidden')).toEqual(false);
- expect(customMetrics.$newCustomMetricText.hasClass('hidden')).toBeTruthy();
+ expect(customMetrics.$newCustomMetricText.hasClass('hidden')).toBe(true);
});
it('should show empty state when called with `empty`', () => {
customMetrics.showMonitoringCustomMetricsPanelState(PANEL_STATE.EMPTY);
- expect(customMetrics.$monitoredCustomMetricsLoading.hasClass('hidden')).toBeTruthy();
+ expect(customMetrics.$monitoredCustomMetricsLoading.hasClass('hidden')).toBe(true);
expect(customMetrics.$monitoredCustomMetricsEmpty.hasClass('hidden')).toEqual(false);
- expect(customMetrics.$monitoredCustomMetricsList.hasClass('hidden')).toBeTruthy();
- expect(
- customMetrics.$monitoredCustomMetricsNoIntegrationText.hasClass('hidden'),
- ).toBeTruthy();
+ expect(customMetrics.$monitoredCustomMetricsList.hasClass('hidden')).toBe(true);
+ expect(customMetrics.$monitoredCustomMetricsNoIntegrationText.hasClass('hidden')).toBe(true);
expect(customMetrics.$newCustomMetricButton.hasClass('hidden')).toEqual(false);
expect(customMetrics.$newCustomMetricText.hasClass('hidden')).toEqual(false);
@@ -94,14 +88,12 @@ describe('PrometheusMetrics', () => {
const $metricsListLi = customMetrics.$monitoredCustomMetricsList.find('li');
- expect(customMetrics.$monitoredCustomMetricsLoading.hasClass('hidden')).toBeTruthy();
+ expect(customMetrics.$monitoredCustomMetricsLoading.hasClass('hidden')).toBe(true);
expect(customMetrics.$monitoredCustomMetricsList.hasClass('hidden')).toEqual(false);
- expect(
- customMetrics.$monitoredCustomMetricsNoIntegrationText.hasClass('hidden'),
- ).toBeTruthy();
+ expect(customMetrics.$monitoredCustomMetricsNoIntegrationText.hasClass('hidden')).toBe(true);
expect(customMetrics.$newCustomMetricButton.hasClass('hidden')).toEqual(false);
- expect(customMetrics.$newCustomMetricText.hasClass('hidden')).toBeTruthy();
+ expect(customMetrics.$newCustomMetricText.hasClass('hidden')).toBe(true);
expect($metricsListLi.length).toEqual(metrics.length);
});
@@ -114,10 +106,10 @@ describe('PrometheusMetrics', () => {
false,
);
- expect(customMetrics.$monitoredCustomMetricsLoading.hasClass('hidden')).toBeTruthy();
- expect(customMetrics.$monitoredCustomMetricsList.hasClass('hidden')).toBeTruthy();
- expect(customMetrics.$newCustomMetricButton.hasClass('hidden')).toBeTruthy();
- expect(customMetrics.$newCustomMetricText.hasClass('hidden')).toBeTruthy();
+ expect(customMetrics.$monitoredCustomMetricsLoading.hasClass('hidden')).toBe(true);
+ expect(customMetrics.$monitoredCustomMetricsList.hasClass('hidden')).toBe(true);
+ expect(customMetrics.$newCustomMetricButton.hasClass('hidden')).toBe(true);
+ expect(customMetrics.$newCustomMetricText.hasClass('hidden')).toBe(true);
});
});
});
diff --git a/spec/frontend/prometheus_metrics/prometheus_metrics_spec.js b/spec/frontend/prometheus_metrics/prometheus_metrics_spec.js
index 0df2aad5882..a65cbe1a47a 100644
--- a/spec/frontend/prometheus_metrics/prometheus_metrics_spec.js
+++ b/spec/frontend/prometheus_metrics/prometheus_metrics_spec.js
@@ -54,25 +54,25 @@ describe('PrometheusMetrics', () => {
it('should show loading state when called with `loading`', () => {
prometheusMetrics.showMonitoringMetricsPanelState(PANEL_STATE.LOADING);
- expect(prometheusMetrics.$monitoredMetricsLoading.hasClass('hidden')).toBeFalsy();
- expect(prometheusMetrics.$monitoredMetricsEmpty.hasClass('hidden')).toBeTruthy();
- expect(prometheusMetrics.$monitoredMetricsList.hasClass('hidden')).toBeTruthy();
+ expect(prometheusMetrics.$monitoredMetricsLoading.hasClass('hidden')).toBe(false);
+ expect(prometheusMetrics.$monitoredMetricsEmpty.hasClass('hidden')).toBe(true);
+ expect(prometheusMetrics.$monitoredMetricsList.hasClass('hidden')).toBe(true);
});
it('should show metrics list when called with `list`', () => {
prometheusMetrics.showMonitoringMetricsPanelState(PANEL_STATE.LIST);
- expect(prometheusMetrics.$monitoredMetricsLoading.hasClass('hidden')).toBeTruthy();
- expect(prometheusMetrics.$monitoredMetricsEmpty.hasClass('hidden')).toBeTruthy();
- expect(prometheusMetrics.$monitoredMetricsList.hasClass('hidden')).toBeFalsy();
+ expect(prometheusMetrics.$monitoredMetricsLoading.hasClass('hidden')).toBe(true);
+ expect(prometheusMetrics.$monitoredMetricsEmpty.hasClass('hidden')).toBe(true);
+ expect(prometheusMetrics.$monitoredMetricsList.hasClass('hidden')).toBe(false);
});
it('should show empty state when called with `empty`', () => {
prometheusMetrics.showMonitoringMetricsPanelState(PANEL_STATE.EMPTY);
- expect(prometheusMetrics.$monitoredMetricsLoading.hasClass('hidden')).toBeTruthy();
- expect(prometheusMetrics.$monitoredMetricsEmpty.hasClass('hidden')).toBeFalsy();
- expect(prometheusMetrics.$monitoredMetricsList.hasClass('hidden')).toBeTruthy();
+ expect(prometheusMetrics.$monitoredMetricsLoading.hasClass('hidden')).toBe(true);
+ expect(prometheusMetrics.$monitoredMetricsEmpty.hasClass('hidden')).toBe(false);
+ expect(prometheusMetrics.$monitoredMetricsList.hasClass('hidden')).toBe(true);
});
});
@@ -88,8 +88,8 @@ describe('PrometheusMetrics', () => {
const $metricsListLi = prometheusMetrics.$monitoredMetricsList.find('li');
- expect(prometheusMetrics.$monitoredMetricsLoading.hasClass('hidden')).toBeTruthy();
- expect(prometheusMetrics.$monitoredMetricsList.hasClass('hidden')).toBeFalsy();
+ expect(prometheusMetrics.$monitoredMetricsLoading.hasClass('hidden')).toBe(true);
+ expect(prometheusMetrics.$monitoredMetricsList.hasClass('hidden')).toBe(false);
expect(prometheusMetrics.$monitoredMetricsCount.text()).toEqual(
'3 exporters with 12 metrics were found',
@@ -102,8 +102,8 @@ describe('PrometheusMetrics', () => {
it('should show missing environment variables list', () => {
prometheusMetrics.populateActiveMetrics(missingVarMetrics);
- expect(prometheusMetrics.$monitoredMetricsLoading.hasClass('hidden')).toBeTruthy();
- expect(prometheusMetrics.$missingEnvVarPanel.hasClass('hidden')).toBeFalsy();
+ expect(prometheusMetrics.$monitoredMetricsLoading.hasClass('hidden')).toBe(true);
+ expect(prometheusMetrics.$missingEnvVarPanel.hasClass('hidden')).toBe(false);
expect(prometheusMetrics.$missingEnvVarMetricCount.text()).toEqual('2');
expect(prometheusMetrics.$missingEnvVarPanel.find('li').length).toEqual(2);
@@ -143,12 +143,12 @@ describe('PrometheusMetrics', () => {
prometheusMetrics.loadActiveMetrics();
- expect(prometheusMetrics.$monitoredMetricsLoading.hasClass('hidden')).toBeFalsy();
+ expect(prometheusMetrics.$monitoredMetricsLoading.hasClass('hidden')).toBe(false);
expect(axios.get).toHaveBeenCalledWith(prometheusMetrics.activeMetricsEndpoint);
await waitForPromises();
- expect(prometheusMetrics.$monitoredMetricsLoading.hasClass('hidden')).toBeTruthy();
+ expect(prometheusMetrics.$monitoredMetricsLoading.hasClass('hidden')).toBe(true);
});
it('should show empty state if response failed to load', async () => {
@@ -158,8 +158,8 @@ describe('PrometheusMetrics', () => {
await waitForPromises();
- expect(prometheusMetrics.$monitoredMetricsLoading.hasClass('hidden')).toBeTruthy();
- expect(prometheusMetrics.$monitoredMetricsEmpty.hasClass('hidden')).toBeFalsy();
+ expect(prometheusMetrics.$monitoredMetricsLoading.hasClass('hidden')).toBe(true);
+ expect(prometheusMetrics.$monitoredMetricsEmpty.hasClass('hidden')).toBe(false);
});
it('should populate metrics list once response is loaded', async () => {
diff --git a/spec/frontend/releases/__snapshots__/util_spec.js.snap b/spec/frontend/releases/__snapshots__/util_spec.js.snap
index 90a33152877..55e3dda60a0 100644
--- a/spec/frontend/releases/__snapshots__/util_spec.js.snap
+++ b/spec/frontend/releases/__snapshots__/util_spec.js.snap
@@ -55,6 +55,7 @@ Object {
"commitPath": "http://localhost/releases-namespace/releases-project/-/commit/b83d6e391c22777fca1ed3012fce84f633d7fed0",
"descriptionHtml": "<p data-sourcepos=\\"1:1-1:23\\" dir=\\"auto\\">An okay release <gl-emoji title=\\"shrug\\" data-name=\\"shrug\\" data-unicode-version=\\"9.0\\">🤷</gl-emoji></p>",
"evidences": Array [],
+ "historicalRelease": false,
"milestones": Array [],
"name": "The second release",
"releasedAt": 2019-01-10T00:00:00.000Z,
@@ -159,6 +160,7 @@ Object {
"sha": "760d6cdfb0879c3ffedec13af470e0f71cf52c6cde4d",
},
],
+ "historicalRelease": false,
"milestones": Array [
Object {
"__typename": "Milestone",
@@ -208,6 +210,7 @@ exports[`releases/util.js convertOneReleaseForEditingGraphQLResponse matches sna
Object {
"data": Object {
"_links": Object {
+ "__typename": "ReleaseLinks",
"self": "http://localhost/releases-namespace/releases-project/-/releases/v1.1",
"selfUrl": "http://localhost/releases-namespace/releases-project/-/releases/v1.1",
},
@@ -215,6 +218,7 @@ Object {
"count": undefined,
"links": Array [
Object {
+ "__typename": "ReleaseAssetLink",
"directAssetPath": "/binaries/awesome-app-3",
"id": "gid://gitlab/Releases::Link/13",
"linkType": "image",
@@ -222,6 +226,7 @@ Object {
"url": "https://example.com/image",
},
Object {
+ "__typename": "ReleaseAssetLink",
"directAssetPath": "/binaries/awesome-app-2",
"id": "gid://gitlab/Releases::Link/12",
"linkType": "package",
@@ -229,6 +234,7 @@ Object {
"url": "https://example.com/package",
},
Object {
+ "__typename": "ReleaseAssetLink",
"directAssetPath": "/binaries/awesome-app-1",
"id": "gid://gitlab/Releases::Link/11",
"linkType": "runbook",
@@ -236,6 +242,7 @@ Object {
"url": "http://localhost/releases-namespace/releases-project/runbook",
},
Object {
+ "__typename": "ReleaseAssetLink",
"directAssetPath": "/binaries/linux-amd64",
"id": "gid://gitlab/Releases::Link/10",
"linkType": "other",
@@ -250,6 +257,7 @@ Object {
"evidences": Array [],
"milestones": Array [
Object {
+ "__typename": "Milestone",
"id": "gid://gitlab/Milestone/123",
"issueStats": Object {},
"stats": undefined,
@@ -258,6 +266,7 @@ Object {
"webUrl": undefined,
},
Object {
+ "__typename": "Milestone",
"id": "gid://gitlab/Milestone/124",
"issueStats": Object {},
"stats": undefined,
@@ -373,6 +382,7 @@ Object {
"sha": "760d6cdfb0879c3ffedec13af470e0f71cf52c6cde4d",
},
],
+ "historicalRelease": false,
"milestones": Array [
Object {
"__typename": "Milestone",
diff --git a/spec/frontend/releases/components/release_block_header_spec.js b/spec/frontend/releases/components/release_block_header_spec.js
index 167ae4f32a2..c9921185bad 100644
--- a/spec/frontend/releases/components/release_block_header_spec.js
+++ b/spec/frontend/releases/components/release_block_header_spec.js
@@ -1,8 +1,9 @@
-import { GlLink } from '@gitlab/ui';
-import { shallowMount } from '@vue/test-utils';
+import { GlLink, GlBadge } from '@gitlab/ui';
import { merge } from 'lodash';
import originalRelease from 'test_fixtures/api/releases/release.json';
import setWindowLocation from 'helpers/set_window_location_helper';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import { __ } from '~/locale';
import { convertObjectPropsToCamelCase } from '~/lib/utils/common_utils';
import ReleaseBlockHeader from '~/releases/components/release_block_header.vue';
import { BACK_URL_PARAM } from '~/releases/constants';
@@ -12,10 +13,11 @@ describe('Release block header', () => {
let release;
const factory = (releaseUpdates = {}) => {
- wrapper = shallowMount(ReleaseBlockHeader, {
+ wrapper = shallowMountExtended(ReleaseBlockHeader, {
propsData: {
release: merge({}, release, releaseUpdates),
},
+ stubs: { GlBadge },
});
};
@@ -30,6 +32,7 @@ describe('Release block header', () => {
const findHeader = () => wrapper.find('h2');
const findHeaderLink = () => findHeader().find(GlLink);
const findEditButton = () => wrapper.find('.js-edit-button');
+ const findBadge = () => wrapper.findComponent(GlBadge);
describe('when _links.self is provided', () => {
beforeEach(() => {
@@ -84,4 +87,34 @@ describe('Release block header', () => {
expect(findEditButton().exists()).toBe(false);
});
});
+
+ describe('upcoming release', () => {
+ beforeEach(() => {
+ factory({ upcomingRelease: true, historicalRelease: false });
+ });
+
+ it('shows a badge that the release is upcoming', () => {
+ const badge = findBadge();
+
+ expect(badge.text()).toBe(__('Upcoming Release'));
+ expect(badge.props('variant')).toBe('warning');
+ });
+ });
+
+ describe('historical release', () => {
+ beforeEach(() => {
+ factory({ upcomingRelease: false, historicalRelease: true });
+ });
+
+ it('shows a badge that the release is historical', () => {
+ const badge = findBadge();
+
+ expect(badge.text()).toBe(__('Historical release'));
+ expect(badge.attributes('title')).toBe(
+ __(
+ 'This release was created with a date in the past. Evidence collection at the moment of the release is unavailable.',
+ ),
+ );
+ });
+ });
});
diff --git a/spec/frontend/reports/components/report_section_spec.js b/spec/frontend/reports/components/report_section_spec.js
index 888b49f3e0c..bdfba8d6878 100644
--- a/spec/frontend/reports/components/report_section_spec.js
+++ b/spec/frontend/reports/components/report_section_spec.js
@@ -1,16 +1,15 @@
-import { mount } from '@vue/test-utils';
-import Vue, { nextTick } from 'vue';
-import mountComponent, { mountComponentWithSlots } from 'helpers/vue_mount_component_helper';
-import { extendedWrapper } from 'helpers/vue_test_utils_helper';
+import { GlButton } from '@gitlab/ui';
+import { mountExtended } from 'helpers/vue_test_utils_helper';
import HelpPopover from '~/vue_shared/components/help_popover.vue';
-import reportSection from '~/reports/components/report_section.vue';
+import ReportItem from '~/reports/components/report_item.vue';
+import ReportSection from '~/reports/components/report_section.vue';
-describe('Report section', () => {
- let vm;
+describe('ReportSection component', () => {
let wrapper;
- const ReportSection = Vue.extend(reportSection);
- const findCollapseButton = () => wrapper.findByTestId('report-section-expand-button');
+
+ const findButton = () => wrapper.findComponent(GlButton);
const findPopover = () => wrapper.findComponent(HelpPopover);
+ const findReportSection = () => wrapper.find('.js-report-section-container');
const resolvedIssues = [
{
@@ -33,34 +32,24 @@ describe('Report section', () => {
alwaysOpen: false,
};
- const createComponent = (props) => {
- wrapper = extendedWrapper(
- mount(reportSection, {
- propsData: {
- ...defaultProps,
- ...props,
- },
- }),
- );
- return wrapper;
+ const createComponent = ({ props = {}, data = {}, slots = {} } = {}) => {
+ wrapper = mountExtended(ReportSection, {
+ propsData: {
+ ...defaultProps,
+ ...props,
+ },
+ data() {
+ return data;
+ },
+ slots,
+ });
};
afterEach(() => {
- if (vm) {
- vm.$destroy();
- vm = null;
- }
- if (wrapper) {
- wrapper.destroy();
- wrapper = null;
- }
+ wrapper.destroy();
});
describe('computed', () => {
- beforeEach(() => {
- vm = mountComponent(ReportSection, defaultProps);
- });
-
describe('isCollapsible', () => {
const testMatrix = [
{ hasIssues: false, alwaysOpen: false, isCollapsible: false },
@@ -73,12 +62,10 @@ describe('Report section', () => {
const issues = hasIssues ? 'has issues' : 'has no issues';
const open = alwaysOpen ? 'is always open' : 'is not always open';
- it(`is ${isCollapsible}, if the report ${issues} and ${open}`, async () => {
- vm.hasIssues = hasIssues;
- vm.alwaysOpen = alwaysOpen;
+ it(`is ${isCollapsible}, if the report ${issues} and ${open}`, () => {
+ createComponent({ props: { hasIssues, alwaysOpen } });
- await nextTick();
- expect(vm.isCollapsible).toBe(isCollapsible);
+ expect(wrapper.vm.isCollapsible).toBe(isCollapsible);
});
});
});
@@ -95,12 +82,10 @@ describe('Report section', () => {
const issues = isCollapsed ? 'is collapsed' : 'is not collapsed';
const open = alwaysOpen ? 'is always open' : 'is not always open';
- it(`is ${isExpanded}, if the report ${issues} and ${open}`, async () => {
- vm.isCollapsed = isCollapsed;
- vm.alwaysOpen = alwaysOpen;
+ it(`is ${isExpanded}, if the report ${issues} and ${open}`, () => {
+ createComponent({ props: { alwaysOpen }, data: { isCollapsed } });
- await nextTick();
- expect(vm.isExpanded).toBe(isExpanded);
+ expect(wrapper.vm.isExpanded).toBe(isExpanded);
});
});
});
@@ -108,110 +93,105 @@ describe('Report section', () => {
describe('when it is loading', () => {
it('should render loading indicator', () => {
- vm = mountComponent(ReportSection, {
- component: '',
- status: 'LOADING',
- loadingText: 'Loading Code Quality report',
- errorText: 'foo',
- successText: 'Code quality improved on 1 point and degraded on 1 point',
- hasIssues: false,
+ createComponent({
+ props: {
+ component: '',
+ status: 'LOADING',
+ loadingText: 'Loading Code Quality report',
+ errorText: 'foo',
+ successText: 'Code quality improved on 1 point and degraded on 1 point',
+ hasIssues: false,
+ },
});
- expect(vm.$el.textContent.trim()).toEqual('Loading Code Quality report');
+ expect(wrapper.text()).toBe('Loading Code Quality report');
});
});
describe('with success status', () => {
- beforeEach(() => {
- vm = mountComponent(ReportSection, {
- ...defaultProps,
- hasIssues: true,
- });
- });
-
it('should render provided data', () => {
- expect(vm.$el.querySelector('.js-code-text').textContent.trim()).toEqual(
- 'Code quality improved on 1 point and degraded on 1 point',
- );
+ createComponent({ props: { hasIssues: true } });
- expect(vm.$el.querySelectorAll('.report-block-container li').length).toEqual(
- resolvedIssues.length,
+ expect(wrapper.find('.js-code-text').text()).toBe(
+ 'Code quality improved on 1 point and degraded on 1 point',
);
+ expect(wrapper.findAllComponents(ReportItem)).toHaveLength(resolvedIssues.length);
});
describe('toggleCollapsed', () => {
- const hiddenCss = { display: 'none' };
-
it('toggles issues', async () => {
- vm.$el.querySelector('button').click();
+ createComponent({ props: { hasIssues: true } });
+
+ await findButton().trigger('click');
- await nextTick();
- expect(vm.$el.querySelector('.js-report-section-container')).not.toHaveCss(hiddenCss);
- expect(vm.$el.querySelector('button').textContent.trim()).toEqual('Collapse');
+ expect(findReportSection().isVisible()).toBe(true);
+ expect(findButton().text()).toBe('Collapse');
- vm.$el.querySelector('button').click();
+ await findButton().trigger('click');
- await nextTick();
- expect(vm.$el.querySelector('.js-report-section-container')).toHaveCss(hiddenCss);
- expect(vm.$el.querySelector('button').textContent.trim()).toEqual('Expand');
+ expect(findReportSection().isVisible()).toBe(false);
+ expect(findButton().text()).toBe('Expand');
});
- it('is always expanded, if always-open is set to true', async () => {
- vm.alwaysOpen = true;
- await nextTick();
- expect(vm.$el.querySelector('.js-report-section-container')).not.toHaveCss(hiddenCss);
- expect(vm.$el.querySelector('button')).toBeNull();
+ it('is always expanded, if always-open is set to true', () => {
+ createComponent({ props: { hasIssues: true, alwaysOpen: true } });
+
+ expect(findReportSection().isVisible()).toBe(true);
+ expect(findButton().exists()).toBe(false);
});
});
});
describe('snowplow events', () => {
- it('does emit an event on issue toggle if the shouldEmitToggleEvent prop does exist', async () => {
- createComponent({ hasIssues: true, shouldEmitToggleEvent: true });
+ it('does emit an event on issue toggle if the shouldEmitToggleEvent prop does exist', () => {
+ createComponent({ props: { hasIssues: true, shouldEmitToggleEvent: true } });
- expect(wrapper.emitted().toggleEvent).toBeUndefined();
+ expect(wrapper.emitted('toggleEvent')).toBeUndefined();
- findCollapseButton().trigger('click');
- await nextTick();
- expect(wrapper.emitted().toggleEvent).toHaveLength(1);
+ findButton().trigger('click');
+
+ expect(wrapper.emitted('toggleEvent')).toEqual([[]]);
});
- it('does not emit an event on issue toggle if the shouldEmitToggleEvent prop does not exist', async () => {
- createComponent({ hasIssues: true });
+ it('does not emit an event on issue toggle if the shouldEmitToggleEvent prop does not exist', () => {
+ createComponent({ props: { hasIssues: true } });
+
+ expect(wrapper.emitted('toggleEvent')).toBeUndefined();
- expect(wrapper.emitted().toggleEvent).toBeUndefined();
+ findButton().trigger('click');
- findCollapseButton().trigger('click');
- await nextTick();
- expect(wrapper.emitted().toggleEvent).toBeUndefined();
+ expect(wrapper.emitted('toggleEvent')).toBeUndefined();
});
- it('does not emit an event if always-open is set to true', async () => {
- createComponent({ alwaysOpen: true, hasIssues: true, shouldEmitToggleEvent: true });
+ it('does not emit an event if always-open is set to true', () => {
+ createComponent({
+ props: { alwaysOpen: true, hasIssues: true, shouldEmitToggleEvent: true },
+ });
- await nextTick();
- expect(wrapper.emitted().toggleEvent).toBeUndefined();
+ expect(wrapper.emitted('toggleEvent')).toBeUndefined();
});
});
describe('with failed request', () => {
it('should render error indicator', () => {
- vm = mountComponent(ReportSection, {
- component: '',
- status: 'ERROR',
- loadingText: 'Loading Code Quality report',
- errorText: 'Failed to load Code Quality report',
- successText: 'Code quality improved on 1 point and degraded on 1 point',
- hasIssues: false,
+ createComponent({
+ props: {
+ component: '',
+ status: 'ERROR',
+ loadingText: 'Loading Code Quality report',
+ errorText: 'Failed to load Code Quality report',
+ successText: 'Code quality improved on 1 point and degraded on 1 point',
+ hasIssues: false,
+ },
});
- expect(vm.$el.textContent.trim()).toEqual('Failed to load Code Quality report');
+ expect(wrapper.text()).toBe('Failed to load Code Quality report');
});
});
describe('with action buttons passed to the slot', () => {
beforeEach(() => {
- vm = mountComponentWithSlots(ReportSection, {
+ createComponent({
props: {
status: 'SUCCESS',
successText: 'success',
@@ -224,17 +204,17 @@ describe('Report section', () => {
});
it('should render the passed button', () => {
- expect(vm.$el.textContent.trim()).toContain('Action!');
+ expect(wrapper.text()).toContain('Action!');
});
it('should still render the expand/collapse button', () => {
- expect(vm.$el.querySelector('.js-collapse-btn').textContent.trim()).toEqual('Expand');
+ expect(findButton().text()).toBe('Expand');
});
});
describe('Success and Error slots', () => {
const createComponentWithSlots = (status) => {
- vm = mountComponentWithSlots(ReportSection, {
+ createComponent({
props: {
status,
hasIssues: true,
@@ -250,25 +230,25 @@ describe('Report section', () => {
it('only renders success slot when status is "SUCCESS"', () => {
createComponentWithSlots('SUCCESS');
- expect(vm.$el.textContent.trim()).toContain('This is a success');
- expect(vm.$el.textContent.trim()).not.toContain('This is an error');
- expect(vm.$el.textContent.trim()).not.toContain('This is loading');
+ expect(wrapper.text()).toContain('This is a success');
+ expect(wrapper.text()).not.toContain('This is an error');
+ expect(wrapper.text()).not.toContain('This is loading');
});
it('only renders error slot when status is "ERROR"', () => {
createComponentWithSlots('ERROR');
- expect(vm.$el.textContent.trim()).toContain('This is an error');
- expect(vm.$el.textContent.trim()).not.toContain('This is a success');
- expect(vm.$el.textContent.trim()).not.toContain('This is loading');
+ expect(wrapper.text()).toContain('This is an error');
+ expect(wrapper.text()).not.toContain('This is a success');
+ expect(wrapper.text()).not.toContain('This is loading');
});
it('only renders loading slot when status is "LOADING"', () => {
createComponentWithSlots('LOADING');
- expect(vm.$el.textContent.trim()).toContain('This is loading');
- expect(vm.$el.textContent.trim()).not.toContain('This is an error');
- expect(vm.$el.textContent.trim()).not.toContain('This is a success');
+ expect(wrapper.text()).toContain('This is loading');
+ expect(wrapper.text()).not.toContain('This is an error');
+ expect(wrapper.text()).not.toContain('This is a success');
});
});
@@ -280,9 +260,7 @@ describe('Report section', () => {
};
beforeEach(() => {
- createComponent({
- popoverOptions: options,
- });
+ createComponent({ props: { popoverOptions: options } });
});
it('popover is shown with options', () => {
@@ -292,7 +270,7 @@ describe('Report section', () => {
describe('when popover options are not defined', () => {
beforeEach(() => {
- createComponent({ popoverOptions: {} });
+ createComponent({ props: { popoverOptions: {} } });
});
it('popover is not shown', () => {
diff --git a/spec/frontend/repository/components/blob_content_viewer_spec.js b/spec/frontend/repository/components/blob_content_viewer_spec.js
index 2b70cb84c67..0f7cf4e61b2 100644
--- a/spec/frontend/repository/components/blob_content_viewer_spec.js
+++ b/spec/frontend/repository/components/blob_content_viewer_spec.js
@@ -21,12 +21,13 @@ import blobInfoQuery from '~/repository/queries/blob_info.query.graphql';
import userInfoQuery from '~/repository/queries/user_info.query.graphql';
import applicationInfoQuery from '~/repository/queries/application_info.query.graphql';
import CodeIntelligence from '~/code_navigation/components/app.vue';
-import { redirectTo } from '~/lib/utils/url_utility';
+import * as urlUtility from '~/lib/utils/url_utility';
import { isLoggedIn, handleLocationHash } from '~/lib/utils/common_utils';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
import httpStatusCodes from '~/lib/utils/http_status';
import LineHighlighter from '~/blob/line_highlighter';
import { LEGACY_FILE_TYPES } from '~/repository/constants';
+import { SIMPLE_BLOB_VIEWER, RICH_BLOB_VIEWER } from '~/blob/components/constants';
import {
simpleViewerMock,
richViewerMock,
@@ -53,7 +54,12 @@ const mockAxios = new MockAdapter(axios);
const createMockStore = () =>
new Vuex.Store({ actions: { fetchData: jest.fn, setInitialData: jest.fn() } });
-const createComponent = async (mockData = {}, mountFn = shallowMount) => {
+const mockRouterPush = jest.fn();
+const mockRouter = {
+ push: mockRouterPush,
+};
+
+const createComponent = async (mockData = {}, mountFn = shallowMount, mockRoute = {}) => {
Vue.use(VueApollo);
const {
@@ -106,6 +112,10 @@ const createComponent = async (mockData = {}, mountFn = shallowMount) => {
apolloProvider: fakeApollo,
propsData: propsMock,
mixins: [{ data: () => ({ ref: refMock }) }],
+ mocks: {
+ $route: mockRoute,
+ $router: mockRouter,
+ },
provide: {
targetBranch: 'test',
originalBranch: 'default-ref',
@@ -158,10 +168,11 @@ describe('Blob content viewer component', () => {
it('renders a BlobHeader component', async () => {
await createComponent();
- expect(findBlobHeader().props('activeViewerType')).toEqual('simple');
+ expect(findBlobHeader().props('activeViewerType')).toEqual(SIMPLE_BLOB_VIEWER);
expect(findBlobHeader().props('hasRenderError')).toEqual(false);
expect(findBlobHeader().props('hideViewerSwitcher')).toEqual(true);
expect(findBlobHeader().props('blob')).toEqual(simpleViewerMock);
+ expect(mockRouterPush).not.toHaveBeenCalled();
});
it('copies blob text to clipboard', async () => {
@@ -179,7 +190,7 @@ describe('Blob content viewer component', () => {
expect(findBlobContent().props('activeViewer')).toEqual({
fileType: 'text',
tooLarge: false,
- type: 'simple',
+ type: SIMPLE_BLOB_VIEWER,
renderError: null,
});
});
@@ -229,6 +240,12 @@ describe('Blob content viewer component', () => {
expect(LineHighlighter).toHaveBeenCalled();
});
+ it('does not load the LineHighlighter for RichViewers', async () => {
+ mockAxios.onGet(legacyViewerUrl).replyOnce(httpStatusCodes.OK, 'test');
+ await createComponent({ blob: { ...richViewerMock, fileType, highlightJs } });
+ expect(LineHighlighter).not.toHaveBeenCalled();
+ });
+
it('scrolls to the hash', async () => {
mockAxios.onGet(legacyViewerUrl).replyOnce(httpStatusCodes.OK, 'test');
await createComponent({ blob: { ...simpleViewerMock, fileType, highlightJs } });
@@ -241,10 +258,11 @@ describe('Blob content viewer component', () => {
it('renders a BlobHeader component', async () => {
await createComponent({ blob: richViewerMock });
- expect(findBlobHeader().props('activeViewerType')).toEqual('rich');
+ expect(findBlobHeader().props('activeViewerType')).toEqual(RICH_BLOB_VIEWER);
expect(findBlobHeader().props('hasRenderError')).toEqual(false);
expect(findBlobHeader().props('hideViewerSwitcher')).toEqual(false);
expect(findBlobHeader().props('blob')).toEqual(richViewerMock);
+ expect(mockRouterPush).not.toHaveBeenCalled();
});
it('renders a BlobContent component', async () => {
@@ -254,30 +272,49 @@ describe('Blob content viewer component', () => {
expect(findBlobContent().props('activeViewer')).toEqual({
fileType: 'markup',
tooLarge: false,
- type: 'rich',
+ type: RICH_BLOB_VIEWER,
renderError: null,
});
});
- it('updates viewer type when viewer changed is clicked', async () => {
+ it('changes to simple viewer when URL has code line hash', async () => {
+ jest.spyOn(urlUtility, 'getLocationHash').mockReturnValueOnce('L5');
+
await createComponent({ blob: richViewerMock });
expect(findBlobContent().props('activeViewer')).toEqual(
expect.objectContaining({
- type: 'rich',
+ type: SIMPLE_BLOB_VIEWER,
+ }),
+ );
+ expect(findBlobHeader().props('activeViewerType')).toEqual(SIMPLE_BLOB_VIEWER);
+ });
+
+ it('updates viewer type when viewer changed is clicked', async () => {
+ await createComponent({ blob: richViewerMock }, shallowMount, { path: '/mock_path' });
+
+ expect(findBlobContent().props('activeViewer')).toEqual(
+ expect.objectContaining({
+ type: RICH_BLOB_VIEWER,
}),
);
- expect(findBlobHeader().props('activeViewerType')).toEqual('rich');
+ expect(findBlobHeader().props('activeViewerType')).toEqual(RICH_BLOB_VIEWER);
- findBlobHeader().vm.$emit('viewer-changed', 'simple');
+ findBlobHeader().vm.$emit('viewer-changed', SIMPLE_BLOB_VIEWER);
await nextTick();
- expect(findBlobHeader().props('activeViewerType')).toEqual('simple');
+ expect(findBlobHeader().props('activeViewerType')).toEqual(SIMPLE_BLOB_VIEWER);
expect(findBlobContent().props('activeViewer')).toEqual(
expect.objectContaining({
- type: 'simple',
+ type: SIMPLE_BLOB_VIEWER,
}),
);
+ expect(mockRouterPush).toHaveBeenCalledWith({
+ path: '/mock_path',
+ query: {
+ plain: '1',
+ },
+ });
});
});
@@ -497,12 +534,12 @@ describe('Blob content viewer component', () => {
it('simple edit redirects to the simple editor', () => {
findWebIdeLink().vm.$emit('edit', 'simple');
- expect(redirectTo).toHaveBeenCalledWith(simpleViewerMock.editBlobPath);
+ expect(urlUtility.redirectTo).toHaveBeenCalledWith(simpleViewerMock.editBlobPath);
});
it('IDE edit redirects to the IDE editor', () => {
findWebIdeLink().vm.$emit('edit', 'ide');
- expect(redirectTo).toHaveBeenCalledWith(simpleViewerMock.ideEditPath);
+ expect(urlUtility.redirectTo).toHaveBeenCalledWith(simpleViewerMock.ideEditPath);
});
it.each`
@@ -537,4 +574,32 @@ describe('Blob content viewer component', () => {
},
);
});
+
+ describe('active viewer based on plain attribute', () => {
+ it.each`
+ hasRichViewer | plain | activeViewerType
+ ${true} | ${'0'} | ${RICH_BLOB_VIEWER}
+ ${true} | ${'1'} | ${SIMPLE_BLOB_VIEWER}
+ ${false} | ${'0'} | ${SIMPLE_BLOB_VIEWER}
+ ${false} | ${'1'} | ${SIMPLE_BLOB_VIEWER}
+ `(
+ 'activeViewerType is `$activeViewerType` when hasRichViewer is $hasRichViewer and plain is set to $plain',
+ async ({ hasRichViewer, plain, activeViewerType }) => {
+ await createComponent(
+ { blob: hasRichViewer ? richViewerMock : simpleViewerMock },
+ shallowMount,
+ { query: { plain } },
+ );
+
+ await nextTick();
+
+ expect(findBlobContent().props('activeViewer')).toEqual(
+ expect.objectContaining({
+ type: activeViewerType,
+ }),
+ );
+ expect(findBlobHeader().props('activeViewerType')).toEqual(activeViewerType);
+ },
+ );
+ });
});
diff --git a/spec/frontend/repository/mock_data.js b/spec/frontend/repository/mock_data.js
index 0a5766a25f9..4db295fe0b7 100644
--- a/spec/frontend/repository/mock_data.js
+++ b/spec/frontend/repository/mock_data.js
@@ -8,6 +8,7 @@ export const simpleViewerMock = {
language: 'javascript',
path: 'some_file.js',
webPath: 'some_file.js',
+ blamePath: 'blame/file.js',
editBlobPath: 'some_file.js/edit',
gitpodBlobUrl: 'https://gitpod.io#path/to/blob.js',
ideEditPath: 'some_file.js/ide/edit',
diff --git a/spec/frontend/right_sidebar_spec.js b/spec/frontend/right_sidebar_spec.js
index 5847842f5a6..3b220ba8351 100644
--- a/spec/frontend/right_sidebar_spec.js
+++ b/spec/frontend/right_sidebar_spec.js
@@ -70,7 +70,7 @@ describe('RightSidebar', () => {
it('should not hide collapsed icons', () => {
[].forEach.call(document.querySelectorAll('.sidebar-collapsed-icon'), (el) => {
- expect(el.querySelector('.fa, svg').classList.contains('hidden')).toBeFalsy();
+ expect(el.querySelector('.fa, svg').classList.contains('hidden')).toBe(false);
});
});
});
diff --git a/spec/frontend/runner/admin_runner_edit/admin_runner_edit_app_spec.js b/spec/frontend/runner/admin_runner_edit/admin_runner_edit_app_spec.js
index 8a34cb14d8b..ffe3599ac64 100644
--- a/spec/frontend/runner/admin_runner_edit/admin_runner_edit_app_spec.js
+++ b/spec/frontend/runner/admin_runner_edit/admin_runner_edit_app_spec.js
@@ -87,10 +87,10 @@ describe('AdminRunnerEditApp', () => {
await createComponentWithApollo();
expect(findRunnerUpdateForm().props()).toMatchObject({
- runner: mockRunner,
loading: false,
runnerPath: mockRunnerPath,
});
+ expect(findRunnerUpdateForm().props('runner')).toEqual(mockRunner);
});
describe('When there is an error', () => {
diff --git a/spec/frontend/runner/admin_runner_show/admin_runner_show_app_spec.js b/spec/frontend/runner/admin_runner_show/admin_runner_show_app_spec.js
index 433be5d5027..509681c5a77 100644
--- a/spec/frontend/runner/admin_runner_show/admin_runner_show_app_spec.js
+++ b/spec/frontend/runner/admin_runner_show/admin_runner_show_app_spec.js
@@ -14,6 +14,7 @@ import RunnerPauseButton from '~/runner/components/runner_pause_button.vue';
import RunnerDeleteButton from '~/runner/components/runner_delete_button.vue';
import RunnerEditButton from '~/runner/components/runner_edit_button.vue';
import RunnersJobs from '~/runner/components/runner_jobs.vue';
+
import runnerQuery from '~/runner/graphql/show/runner.query.graphql';
import AdminRunnerShowApp from '~/runner/admin_runner_show/admin_runner_show_app.vue';
import { captureException } from '~/runner/sentry_utils';
@@ -94,10 +95,10 @@ describe('AdminRunnerShowApp', () => {
});
it('shows basic runner details', async () => {
- const expected = `Description Instance runner
+ const expected = `Description My Runner
Last contact Never contacted
Version 1.0.0
- IP Address 127.0.0.1
+ IP Address None
Executor None
Architecture None
Platform darwin
@@ -182,17 +183,19 @@ describe('AdminRunnerShowApp', () => {
});
describe('When loading', () => {
- beforeEach(() => {
+ it('does not show runner details', () => {
mockRunnerQueryResult();
createComponent();
- });
- it('does not show runner details', () => {
expect(findRunnerDetails().exists()).toBe(false);
});
it('does not show runner jobs', () => {
+ mockRunnerQueryResult();
+
+ createComponent();
+
expect(findRunnersJobs().exists()).toBe(false);
});
});
diff --git a/spec/frontend/runner/admin_runners/admin_runners_app_spec.js b/spec/frontend/runner/admin_runners/admin_runners_app_spec.js
index aa1aa723491..97341be7d5d 100644
--- a/spec/frontend/runner/admin_runners/admin_runners_app_spec.js
+++ b/spec/frontend/runner/admin_runners/admin_runners_app_spec.js
@@ -1,4 +1,4 @@
-import Vue from 'vue';
+import Vue, { nextTick } from 'vue';
import { GlToast, GlLink } from '@gitlab/ui';
import VueApollo from 'vue-apollo';
import createMockApollo from 'helpers/mock_apollo_helper';
@@ -19,10 +19,11 @@ import { createLocalState } from '~/runner/graphql/list/local_state';
import AdminRunnersApp from '~/runner/admin_runners/admin_runners_app.vue';
import RunnerTypeTabs from '~/runner/components/runner_type_tabs.vue';
import RunnerFilteredSearchBar from '~/runner/components/runner_filtered_search_bar.vue';
+import RunnerBulkDelete from '~/runner/components/runner_bulk_delete.vue';
+import RunnerBulkDeleteCheckbox from '~/runner/components/runner_bulk_delete_checkbox.vue';
import RunnerList from '~/runner/components/runner_list.vue';
import RunnerListEmptyState from '~/runner/components/runner_list_empty_state.vue';
import RunnerStats from '~/runner/components/stat/runner_stats.vue';
-import RunnerCount from '~/runner/components/stat/runner_count.vue';
import RunnerActionsCell from '~/runner/components/cells/runner_actions_cell.vue';
import RegistrationDropdown from '~/runner/components/registration/registration_dropdown.vue';
import RunnerPagination from '~/runner/components/runner_pagination.vue';
@@ -37,12 +38,10 @@ import {
PARAM_KEY_STATUS,
PARAM_KEY_TAG,
STATUS_ONLINE,
- STATUS_OFFLINE,
- STATUS_STALE,
RUNNER_PAGE_SIZE,
} from '~/runner/constants';
import allRunnersQuery from 'ee_else_ce/runner/graphql/list/all_runners.query.graphql';
-import allRunnersCountQuery from '~/runner/graphql/list/all_runners_count.query.graphql';
+import allRunnersCountQuery from 'ee_else_ce/runner/graphql/list/all_runners_count.query.graphql';
import { captureException } from '~/runner/sentry_utils';
import {
@@ -51,6 +50,7 @@ import {
allRunnersDataPaginated,
onlineContactTimeoutSecs,
staleTimeoutSecs,
+ emptyPageInfo,
emptyStateSvgPath,
emptyStateFilteredSvgPath,
} from '../mock_data';
@@ -72,19 +72,24 @@ jest.mock('~/lib/utils/url_utility', () => ({
Vue.use(VueApollo);
Vue.use(GlToast);
+const COUNT_QUERIES = 7; // 4 tabs + 3 status queries
+
describe('AdminRunnersApp', () => {
let wrapper;
let cacheConfig;
let localMutations;
+ let showToast;
const findRunnerStats = () => wrapper.findComponent(RunnerStats);
const findRunnerActionsCell = () => wrapper.findComponent(RunnerActionsCell);
const findRegistrationDropdown = () => wrapper.findComponent(RegistrationDropdown);
const findRunnerTypeTabs = () => wrapper.findComponent(RunnerTypeTabs);
+ const findRunnerBulkDelete = () => wrapper.findComponent(RunnerBulkDelete);
+ const findRunnerBulkDeleteCheckbox = () => wrapper.findComponent(RunnerBulkDeleteCheckbox);
const findRunnerList = () => wrapper.findComponent(RunnerList);
const findRunnerListEmptyState = () => wrapper.findComponent(RunnerListEmptyState);
const findRunnerPagination = () => extendedWrapper(wrapper.findComponent(RunnerPagination));
- const findRunnerPaginationNext = () => findRunnerPagination().findByLabelText('Go to next page');
+ const findRunnerPaginationNext = () => findRunnerPagination().findByText(s__('Pagination|Next'));
const findRunnerFilteredSearchBar = () => wrapper.findComponent(RunnerFilteredSearchBar);
const createComponent = ({
@@ -117,6 +122,8 @@ describe('AdminRunnersApp', () => {
...options,
});
+ showToast = jest.spyOn(wrapper.vm.$root.$toast, 'show');
+
return waitForPromises();
};
@@ -128,17 +135,10 @@ describe('AdminRunnersApp', () => {
afterEach(() => {
mockRunnersHandler.mockReset();
mockRunnersCountHandler.mockReset();
+ showToast.mockReset();
wrapper.destroy();
});
- it('shows the runner tabs with a runner count for each type', async () => {
- await createComponent({ mountFn: mountExtended });
-
- expect(findRunnerTypeTabs().text()).toMatchInterpolatedText(
- `All ${mockRunnersCount} Instance ${mockRunnersCount} Group ${mockRunnersCount} Project ${mockRunnersCount}`,
- );
- });
-
it('shows the runner setup instructions', () => {
createComponent();
@@ -146,27 +146,38 @@ describe('AdminRunnersApp', () => {
expect(findRegistrationDropdown().props('type')).toBe(INSTANCE_TYPE);
});
- it('shows total runner counts', async () => {
- await createComponent({ mountFn: mountExtended });
+ describe('shows total runner counts', () => {
+ beforeEach(async () => {
+ await createComponent({ mountFn: mountExtended });
+ });
+
+ it('fetches counts', () => {
+ expect(mockRunnersCountHandler).toHaveBeenCalledTimes(COUNT_QUERIES);
+ });
+
+ it('shows the runner tabs', () => {
+ expect(findRunnerTypeTabs().text()).toMatchInterpolatedText(
+ `All ${mockRunnersCount} Instance ${mockRunnersCount} Group ${mockRunnersCount} Project ${mockRunnersCount}`,
+ );
+ });
- expect(mockRunnersCountHandler).toHaveBeenCalledWith({ status: STATUS_ONLINE });
- expect(mockRunnersCountHandler).toHaveBeenCalledWith({ status: STATUS_OFFLINE });
- expect(mockRunnersCountHandler).toHaveBeenCalledWith({ status: STATUS_STALE });
-
- expect(findRunnerStats().text()).toContain(
- `${s__('Runners|Online runners')} ${mockRunnersCount}`,
- );
- expect(findRunnerStats().text()).toContain(
- `${s__('Runners|Offline runners')} ${mockRunnersCount}`,
- );
- expect(findRunnerStats().text()).toContain(
- `${s__('Runners|Stale runners')} ${mockRunnersCount}`,
- );
+ it('shows the total', () => {
+ expect(findRunnerStats().text()).toContain(
+ `${s__('Runners|Online runners')} ${mockRunnersCount}`,
+ );
+ expect(findRunnerStats().text()).toContain(
+ `${s__('Runners|Offline runners')} ${mockRunnersCount}`,
+ );
+ expect(findRunnerStats().text()).toContain(
+ `${s__('Runners|Stale runners')} ${mockRunnersCount}`,
+ );
+ });
});
it('shows the runners list', async () => {
await createComponent();
+ expect(mockRunnersHandler).toHaveBeenCalledTimes(1);
expect(findRunnerList().props('runners')).toEqual(mockRunners);
});
@@ -226,18 +237,13 @@ describe('AdminRunnersApp', () => {
});
describe('Single runner row', () => {
- let showToast;
-
const { id: graphqlId, shortSha } = mockRunners[0];
const id = getIdFromGraphQLId(graphqlId);
- const COUNT_QUERIES = 7; // Smart queries that display a filtered count of runners
- const FILTERED_COUNT_QUERIES = 4; // Smart queries that display a count of runners in tabs
beforeEach(async () => {
mockRunnersCountHandler.mockClear();
await createComponent({ mountFn: mountExtended });
- showToast = jest.spyOn(wrapper.vm.$root.$toast, 'show');
});
it('Links to the runner page', async () => {
@@ -252,7 +258,7 @@ describe('AdminRunnersApp', () => {
findRunnerActionsCell().vm.$emit('toggledPaused');
- expect(mockRunnersCountHandler).toHaveBeenCalledTimes(COUNT_QUERIES + FILTERED_COUNT_QUERIES);
+ expect(mockRunnersCountHandler).toHaveBeenCalledTimes(COUNT_QUERIES * 2);
expect(showToast).toHaveBeenCalledTimes(0);
});
@@ -266,25 +272,20 @@ describe('AdminRunnersApp', () => {
describe('when a filter is preselected', () => {
beforeEach(async () => {
- setWindowLocation(`?status[]=${STATUS_ONLINE}&runner_type[]=${INSTANCE_TYPE}&tag[]=tag1`);
+ setWindowLocation(`?status[]=${STATUS_ONLINE}&runner_type[]=${INSTANCE_TYPE}&paused[]=true`);
- await createComponent({
- stubs: {
- RunnerStats,
- RunnerCount,
- },
- });
+ await createComponent({ mountFn: mountExtended });
});
it('sets the filters in the search bar', () => {
expect(findRunnerFilteredSearchBar().props('value')).toEqual({
runnerType: INSTANCE_TYPE,
filters: [
- { type: 'status', value: { data: STATUS_ONLINE, operator: '=' } },
- { type: 'tag', value: { data: 'tag1', operator: '=' } },
+ { type: PARAM_KEY_STATUS, value: { data: STATUS_ONLINE, operator: '=' } },
+ { type: PARAM_KEY_PAUSED, value: { data: 'true', operator: '=' } },
],
sort: 'CREATED_DESC',
- pagination: { page: 1 },
+ pagination: {},
});
});
@@ -292,7 +293,7 @@ describe('AdminRunnersApp', () => {
expect(mockRunnersHandler).toHaveBeenLastCalledWith({
status: STATUS_ONLINE,
type: INSTANCE_TYPE,
- tagList: ['tag1'],
+ paused: true,
sort: DEFAULT_SORT,
first: RUNNER_PAGE_SIZE,
});
@@ -302,41 +303,34 @@ describe('AdminRunnersApp', () => {
expect(mockRunnersCountHandler).toHaveBeenCalledWith({
type: INSTANCE_TYPE,
status: STATUS_ONLINE,
- tagList: ['tag1'],
+ paused: true,
});
});
});
describe('when a filter is selected by the user', () => {
- beforeEach(() => {
- createComponent({
- stubs: {
- RunnerStats,
- RunnerCount,
- },
- });
+ beforeEach(async () => {
+ await createComponent({ mountFn: mountExtended });
findRunnerFilteredSearchBar().vm.$emit('input', {
runnerType: null,
- filters: [
- { type: PARAM_KEY_STATUS, value: { data: STATUS_ONLINE, operator: '=' } },
- { type: PARAM_KEY_TAG, value: { data: 'tag1', operator: '=' } },
- ],
+ filters: [{ type: PARAM_KEY_STATUS, value: { data: STATUS_ONLINE, operator: '=' } }],
sort: CREATED_ASC,
});
+
+ await nextTick();
});
it('updates the browser url', () => {
expect(updateHistory).toHaveBeenLastCalledWith({
title: expect.any(String),
- url: expect.stringContaining('?status[]=ONLINE&tag[]=tag1&sort=CREATED_ASC'),
+ url: expect.stringContaining('?status[]=ONLINE&sort=CREATED_ASC'),
});
});
it('requests the runners with filters', () => {
expect(mockRunnersHandler).toHaveBeenLastCalledWith({
status: STATUS_ONLINE,
- tagList: ['tag1'],
sort: CREATED_ASC,
first: RUNNER_PAGE_SIZE,
});
@@ -344,7 +338,6 @@ describe('AdminRunnersApp', () => {
it('fetches count results for requested status', () => {
expect(mockRunnersCountHandler).toHaveBeenCalledWith({
- tagList: ['tag1'],
status: STATUS_ONLINE,
});
});
@@ -353,39 +346,79 @@ describe('AdminRunnersApp', () => {
it('when runners have not loaded, shows a loading state', () => {
createComponent();
expect(findRunnerList().props('loading')).toBe(true);
+ expect(findRunnerPagination().attributes('disabled')).toBe('true');
});
describe('when bulk delete is enabled', () => {
- beforeEach(() => {
- createComponent({
- provide: {
- glFeatures: { adminRunnersBulkDelete: true },
- },
+ describe('Before runners are deleted', () => {
+ beforeEach(async () => {
+ await createComponent({
+ mountFn: mountExtended,
+ provide: {
+ glFeatures: { adminRunnersBulkDelete: true },
+ },
+ });
});
- });
- it('runner list is checkable', () => {
- expect(findRunnerList().props('checkable')).toBe(true);
+ it('runner bulk delete is available', () => {
+ expect(findRunnerBulkDelete().props('runners')).toEqual(mockRunners);
+ });
+
+ it('runner bulk delete checkbox is available', () => {
+ expect(findRunnerBulkDeleteCheckbox().props('runners')).toEqual(mockRunners);
+ });
+
+ it('runner list is checkable', () => {
+ expect(findRunnerList().props('checkable')).toBe(true);
+ });
+
+ it('responds to checked items by updating the local cache', () => {
+ const setRunnerCheckedMock = jest
+ .spyOn(localMutations, 'setRunnerChecked')
+ .mockImplementation(() => {});
+
+ const runner = mockRunners[0];
+
+ expect(setRunnerCheckedMock).toHaveBeenCalledTimes(0);
+
+ findRunnerList().vm.$emit('checked', {
+ runner,
+ isChecked: true,
+ });
+
+ expect(setRunnerCheckedMock).toHaveBeenCalledTimes(1);
+ expect(setRunnerCheckedMock).toHaveBeenCalledWith({
+ runner,
+ isChecked: true,
+ });
+ });
});
- it('responds to checked items by updating the local cache', () => {
- const setRunnerCheckedMock = jest
- .spyOn(localMutations, 'setRunnerChecked')
- .mockImplementation(() => {});
+ describe('When runners are deleted', () => {
+ beforeEach(async () => {
+ await createComponent({
+ mountFn: mountExtended,
+ provide: {
+ glFeatures: { adminRunnersBulkDelete: true },
+ },
+ });
+ });
- const runner = mockRunners[0];
+ it('count data is refetched', async () => {
+ expect(mockRunnersCountHandler).toHaveBeenCalledTimes(COUNT_QUERIES);
- expect(setRunnerCheckedMock).toHaveBeenCalledTimes(0);
+ findRunnerBulkDelete().vm.$emit('deleted', { message: 'Runners deleted' });
- findRunnerList().vm.$emit('checked', {
- runner,
- isChecked: true,
+ expect(mockRunnersCountHandler).toHaveBeenCalledTimes(COUNT_QUERIES * 2);
});
- expect(setRunnerCheckedMock).toHaveBeenCalledTimes(1);
- expect(setRunnerCheckedMock).toHaveBeenCalledWith({
- runner,
- isChecked: true,
+ it('toast is shown', async () => {
+ expect(showToast).toHaveBeenCalledTimes(0);
+
+ findRunnerBulkDelete().vm.$emit('deleted', { message: 'Runners deleted' });
+
+ expect(showToast).toHaveBeenCalledTimes(1);
+ expect(showToast).toHaveBeenCalledWith('Runners deleted');
});
});
});
@@ -394,13 +427,20 @@ describe('AdminRunnersApp', () => {
beforeEach(async () => {
mockRunnersHandler.mockResolvedValue({
data: {
- runners: { nodes: [] },
+ runners: {
+ nodes: [],
+ pageInfo: emptyPageInfo,
+ },
},
});
await createComponent();
});
+ it('shows no errors', () => {
+ expect(createAlert).not.toHaveBeenCalled();
+ });
+
it('shows an empty state', () => {
expect(findRunnerListEmptyState().props('isSearchFiltered')).toBe(false);
});
@@ -440,19 +480,25 @@ describe('AdminRunnersApp', () => {
});
describe('Pagination', () => {
+ const { pageInfo } = allRunnersDataPaginated.data.runners;
+
beforeEach(async () => {
mockRunnersHandler.mockResolvedValue(allRunnersDataPaginated);
await createComponent({ mountFn: mountExtended });
});
+ it('passes the page info', () => {
+ expect(findRunnerPagination().props('pageInfo')).toEqual(pageInfo);
+ });
+
it('navigates to the next page', async () => {
await findRunnerPaginationNext().trigger('click');
expect(mockRunnersHandler).toHaveBeenLastCalledWith({
sort: CREATED_DESC,
first: RUNNER_PAGE_SIZE,
- after: allRunnersDataPaginated.data.runners.pageInfo.endCursor,
+ after: pageInfo.endCursor,
});
});
});
diff --git a/spec/frontend/runner/components/cells/runner_summary_cell_spec.js b/spec/frontend/runner/components/cells/runner_summary_cell_spec.js
index b2e8c5a3ad9..b06ab652212 100644
--- a/spec/frontend/runner/components/cells/runner_summary_cell_spec.js
+++ b/spec/frontend/runner/components/cells/runner_summary_cell_spec.js
@@ -1,3 +1,4 @@
+import { __ } from '~/locale';
import { mountExtended } from 'helpers/vue_test_utils_helper';
import RunnerSummaryCell from '~/runner/components/cells/runner_summary_cell.vue';
import { INSTANCE_TYPE, PROJECT_TYPE } from '~/runner/constants';
@@ -61,8 +62,16 @@ describe('RunnerTypeCell', () => {
expect(wrapper.text()).toContain(mockDescription);
});
- it('Displays the runner ip address', () => {
- expect(wrapper.text()).toContain(mockIpAddress);
+ it('Displays ip address', () => {
+ expect(wrapper.text()).toContain(`${__('IP Address')} ${mockIpAddress}`);
+ });
+
+ it('Displays no ip address', () => {
+ createComponent({
+ ipAddress: null,
+ });
+
+ expect(wrapper.text()).not.toContain(__('IP Address'));
});
it('Displays a custom slot', () => {
diff --git a/spec/frontend/runner/components/registration/registration_token_spec.js b/spec/frontend/runner/components/registration/registration_token_spec.js
index ed1a698d36f..19344a68f79 100644
--- a/spec/frontend/runner/components/registration/registration_token_spec.js
+++ b/spec/frontend/runner/components/registration/registration_token_spec.js
@@ -1,5 +1,5 @@
import { GlToast } from '@gitlab/ui';
-import { createLocalVue } from '@vue/test-utils';
+import Vue from 'vue';
import { mountExtended, shallowMountExtended } from 'helpers/vue_test_utils_helper';
import RegistrationToken from '~/runner/components/registration/registration_token.vue';
import InputCopyToggleVisibility from '~/vue_shared/components/form/input_copy_toggle_visibility.vue';
@@ -11,28 +11,17 @@ describe('RegistrationToken', () => {
let wrapper;
let showToast;
- const findInputCopyToggleVisibility = () => wrapper.findComponent(InputCopyToggleVisibility);
-
- const vueWithGlToast = () => {
- const localVue = createLocalVue();
- localVue.use(GlToast);
- return localVue;
- };
+ Vue.use(GlToast);
- const createComponent = ({
- props = {},
- withGlToast = true,
- mountFn = shallowMountExtended,
- } = {}) => {
- const localVue = withGlToast ? vueWithGlToast() : undefined;
+ const findInputCopyToggleVisibility = () => wrapper.findComponent(InputCopyToggleVisibility);
+ const createComponent = ({ props = {}, mountFn = shallowMountExtended } = {}) => {
wrapper = mountFn(RegistrationToken, {
propsData: {
value: mockToken,
inputId: 'token-value',
...props,
},
- localVue,
});
showToast = wrapper.vm.$toast ? jest.spyOn(wrapper.vm.$toast, 'show') : null;
@@ -69,13 +58,5 @@ describe('RegistrationToken', () => {
expect(showToast).toHaveBeenCalledTimes(1);
expect(showToast).toHaveBeenCalledWith('Registration token copied!');
});
-
- it('does not fail when toast is not defined', () => {
- createComponent({ withGlToast: false });
- findInputCopyToggleVisibility().vm.$emit('copy');
-
- // This block also tests for unhandled errors
- expect(showToast).toBeNull();
- });
});
});
diff --git a/spec/frontend/runner/components/runner_assigned_item_spec.js b/spec/frontend/runner/components/runner_assigned_item_spec.js
index 1ff6983fbe7..cc09046c000 100644
--- a/spec/frontend/runner/components/runner_assigned_item_spec.js
+++ b/spec/frontend/runner/components/runner_assigned_item_spec.js
@@ -1,10 +1,12 @@
-import { GlAvatar } from '@gitlab/ui';
+import { GlAvatar, GlBadge } from '@gitlab/ui';
+import { s__ } from '~/locale';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import RunnerAssignedItem from '~/runner/components/runner_assigned_item.vue';
import { AVATAR_SHAPE_OPTION_RECT } from '~/vue_shared/constants';
const mockHref = '/group/project';
const mockName = 'Project';
+const mockDescription = 'Project description';
const mockFullName = 'Group / Project';
const mockAvatarUrl = '/avatar.png';
@@ -12,6 +14,7 @@ describe('RunnerAssignedItem', () => {
let wrapper;
const findAvatar = () => wrapper.findByTestId('item-avatar');
+ const findBadge = () => wrapper.findComponent(GlBadge);
const createComponent = ({ props = {} } = {}) => {
wrapper = shallowMountExtended(RunnerAssignedItem, {
@@ -20,6 +23,7 @@ describe('RunnerAssignedItem', () => {
name: mockName,
fullName: mockFullName,
avatarUrl: mockAvatarUrl,
+ description: mockDescription,
...props,
},
});
@@ -51,4 +55,14 @@ describe('RunnerAssignedItem', () => {
expect(groupFullName.attributes('href')).toBe(mockHref);
});
+
+ it('Shows description', () => {
+ expect(wrapper.text()).toContain(mockDescription);
+ });
+
+ it('Shows owner badge', () => {
+ createComponent({ props: { isOwner: true } });
+
+ expect(findBadge().text()).toBe(s__('Runner|Owner'));
+ });
});
diff --git a/spec/frontend/runner/components/runner_bulk_delete_checkbox_spec.js b/spec/frontend/runner/components/runner_bulk_delete_checkbox_spec.js
new file mode 100644
index 00000000000..0ac89e82314
--- /dev/null
+++ b/spec/frontend/runner/components/runner_bulk_delete_checkbox_spec.js
@@ -0,0 +1,101 @@
+import Vue from 'vue';
+import { GlFormCheckbox } from '@gitlab/ui';
+import VueApollo from 'vue-apollo';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import RunnerBulkDeleteCheckbox from '~/runner/components/runner_bulk_delete_checkbox.vue';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import { createLocalState } from '~/runner/graphql/list/local_state';
+import { allRunnersData } from '../mock_data';
+
+Vue.use(VueApollo);
+
+jest.mock('~/flash');
+
+describe('RunnerBulkDeleteCheckbox', () => {
+ let wrapper;
+ let mockState;
+ let mockCheckedRunnerIds;
+
+ const findCheckbox = () => wrapper.findComponent(GlFormCheckbox);
+
+ const mockRunners = allRunnersData.data.runners.nodes;
+ const mockIds = allRunnersData.data.runners.nodes.map(({ id }) => id);
+ const mockId = mockIds[0];
+ const mockIdAnotherPage = 'RUNNER_IN_ANOTHER_PAGE_ID';
+
+ const createComponent = ({ props = {} } = {}) => {
+ const { cacheConfig, localMutations } = mockState;
+ const apolloProvider = createMockApollo(undefined, undefined, cacheConfig);
+
+ wrapper = shallowMountExtended(RunnerBulkDeleteCheckbox, {
+ apolloProvider,
+ provide: {
+ localMutations,
+ },
+ propsData: {
+ runners: mockRunners,
+ ...props,
+ },
+ });
+ };
+
+ beforeEach(() => {
+ mockState = createLocalState();
+
+ jest
+ .spyOn(mockState.cacheConfig.typePolicies.Query.fields, 'checkedRunnerIds')
+ .mockImplementation(() => mockCheckedRunnerIds);
+
+ jest.spyOn(mockState.localMutations, 'setRunnersChecked');
+ });
+
+ describe.each`
+ case | is | checkedRunnerIds | disabled | checked | indeterminate
+ ${'no runners'} | ${'unchecked'} | ${[]} | ${undefined} | ${undefined} | ${undefined}
+ ${'no runners in this page'} | ${'unchecked'} | ${[mockIdAnotherPage]} | ${undefined} | ${undefined} | ${undefined}
+ ${'all runners'} | ${'checked'} | ${mockIds} | ${undefined} | ${'true'} | ${undefined}
+ ${'some runners'} | ${'indeterminate'} | ${[mockId]} | ${undefined} | ${undefined} | ${'true'}
+ ${'all plus other runners'} | ${'checked'} | ${[...mockIds, mockIdAnotherPage]} | ${undefined} | ${'true'} | ${undefined}
+ `('When $case are checked', ({ is, checkedRunnerIds, disabled, checked, indeterminate }) => {
+ beforeEach(async () => {
+ mockCheckedRunnerIds = checkedRunnerIds;
+
+ createComponent();
+ });
+
+ it(`is ${is}`, () => {
+ expect(findCheckbox().attributes('disabled')).toBe(disabled);
+ expect(findCheckbox().attributes('checked')).toBe(checked);
+ expect(findCheckbox().attributes('indeterminate')).toBe(indeterminate);
+ });
+ });
+
+ describe('When user selects', () => {
+ beforeEach(() => {
+ mockCheckedRunnerIds = mockIds;
+ createComponent();
+ });
+
+ it.each([[true], [false]])('sets checked to %s', (checked) => {
+ findCheckbox().vm.$emit('change', checked);
+
+ expect(mockState.localMutations.setRunnersChecked).toHaveBeenCalledTimes(1);
+ expect(mockState.localMutations.setRunnersChecked).toHaveBeenCalledWith({
+ isChecked: checked,
+ runners: mockRunners,
+ });
+ });
+ });
+
+ describe('When runners are loading', () => {
+ beforeEach(() => {
+ createComponent({ props: { runners: [] } });
+ });
+
+ it(`is disabled`, () => {
+ expect(findCheckbox().attributes('disabled')).toBe('true');
+ expect(findCheckbox().attributes('checked')).toBe(undefined);
+ expect(findCheckbox().attributes('indeterminate')).toBe(undefined);
+ });
+ });
+});
diff --git a/spec/frontend/runner/components/runner_bulk_delete_spec.js b/spec/frontend/runner/components/runner_bulk_delete_spec.js
index f5b56396cf1..6df918c684f 100644
--- a/spec/frontend/runner/components/runner_bulk_delete_spec.js
+++ b/spec/frontend/runner/components/runner_bulk_delete_spec.js
@@ -1,37 +1,65 @@
import Vue from 'vue';
-import { GlSprintf } from '@gitlab/ui';
+import { GlModal, GlSprintf } from '@gitlab/ui';
import VueApollo from 'vue-apollo';
+import { createAlert } from '~/flash';
+import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
-import { confirmAction } from '~/lib/utils/confirm_via_gl_modal/confirm_via_gl_modal';
+import { s__ } from '~/locale';
import RunnerBulkDelete from '~/runner/components/runner_bulk_delete.vue';
import createMockApollo from 'helpers/mock_apollo_helper';
+import BulkRunnerDeleteMutation from '~/runner/graphql/list/bulk_runner_delete.mutation.graphql';
import { createLocalState } from '~/runner/graphql/list/local_state';
import waitForPromises from 'helpers/wait_for_promises';
+import { allRunnersData } from '../mock_data';
Vue.use(VueApollo);
-jest.mock('~/lib/utils/confirm_via_gl_modal/confirm_via_gl_modal');
+jest.mock('~/flash');
describe('RunnerBulkDelete', () => {
let wrapper;
+ let apolloCache;
let mockState;
let mockCheckedRunnerIds;
- const findClearBtn = () => wrapper.findByTestId('clear-btn');
- const findDeleteBtn = () => wrapper.findByTestId('delete-btn');
+ const findClearBtn = () => wrapper.findByText(s__('Runners|Clear selection'));
+ const findDeleteBtn = () => wrapper.findByText(s__('Runners|Delete selected'));
+ const findModal = () => wrapper.findComponent(GlModal);
+
+ const mockRunners = allRunnersData.data.runners.nodes;
+ const mockId1 = allRunnersData.data.runners.nodes[0].id;
+ const mockId2 = allRunnersData.data.runners.nodes[1].id;
+
+ const bulkRunnerDeleteHandler = jest.fn();
const createComponent = () => {
const { cacheConfig, localMutations } = mockState;
+ const apolloProvider = createMockApollo(
+ [[BulkRunnerDeleteMutation, bulkRunnerDeleteHandler]],
+ undefined,
+ cacheConfig,
+ );
wrapper = shallowMountExtended(RunnerBulkDelete, {
- apolloProvider: createMockApollo(undefined, undefined, cacheConfig),
+ apolloProvider,
provide: {
localMutations,
},
+ propsData: {
+ runners: mockRunners,
+ },
+ directives: {
+ GlTooltip: createMockDirective(),
+ },
stubs: {
GlSprintf,
+ GlModal,
},
});
+
+ apolloCache = apolloProvider.defaultClient.cache;
+ jest.spyOn(apolloCache, 'evict');
+ jest.spyOn(apolloCache, 'gc');
};
beforeEach(() => {
@@ -43,6 +71,7 @@ describe('RunnerBulkDelete', () => {
});
afterEach(() => {
+ bulkRunnerDeleteHandler.mockReset();
wrapper.destroy();
});
@@ -61,10 +90,10 @@ describe('RunnerBulkDelete', () => {
});
describe.each`
- count | ids | text
- ${1} | ${['gid:Runner/1']} | ${'1 runner'}
- ${2} | ${['gid:Runner/1', 'gid:Runner/2']} | ${'2 runners'}
- `('When $count runner(s) are checked', ({ count, ids, text }) => {
+ count | ids | text
+ ${1} | ${[mockId1]} | ${'1 runner'}
+ ${2} | ${[mockId1, mockId2]} | ${'2 runners'}
+ `('When $count runner(s) are checked', ({ ids, text }) => {
beforeEach(() => {
mockCheckedRunnerIds = ids;
@@ -86,18 +115,129 @@ describe('RunnerBulkDelete', () => {
});
it('shows confirmation modal', () => {
- expect(confirmAction).toHaveBeenCalledTimes(0);
+ const modalId = getBinding(findDeleteBtn().element, 'gl-modal');
+
+ expect(findModal().props('modal-id')).toBe(modalId);
+ expect(findModal().text()).toContain(text);
+ });
+ });
+
+ describe('when runners are deleted', () => {
+ let evt;
+ let mockHideModal;
+
+ beforeEach(() => {
+ mockCheckedRunnerIds = [mockId1, mockId2];
+
+ createComponent();
+
+ jest.spyOn(mockState.localMutations, 'clearChecked').mockImplementation(() => {});
+ mockHideModal = jest.spyOn(findModal().vm, 'hide');
+ });
+
+ describe('when deletion is successful', () => {
+ beforeEach(() => {
+ bulkRunnerDeleteHandler.mockResolvedValue({
+ data: {
+ bulkRunnerDelete: { deletedIds: mockCheckedRunnerIds, errors: [] },
+ },
+ });
+
+ evt = {
+ preventDefault: jest.fn(),
+ };
+ findModal().vm.$emit('primary', evt);
+ });
+
+ it('has loading state', async () => {
+ expect(findModal().props('actionPrimary').attributes.loading).toBe(true);
+ expect(findModal().props('actionCancel').attributes.loading).toBe(true);
+
+ await waitForPromises();
+
+ expect(findModal().props('actionPrimary').attributes.loading).toBe(false);
+ expect(findModal().props('actionCancel').attributes.loading).toBe(false);
+ });
+
+ it('modal is not prevented from closing', () => {
+ expect(evt.preventDefault).toHaveBeenCalledTimes(1);
+ });
+
+ it('mutation is called', async () => {
+ expect(bulkRunnerDeleteHandler).toHaveBeenCalledWith({
+ input: { ids: mockCheckedRunnerIds },
+ });
+ });
+
+ it('user interface is updated', async () => {
+ const { evict, gc } = apolloCache;
+
+ expect(evict).toHaveBeenCalledTimes(mockCheckedRunnerIds.length);
+ expect(evict).toHaveBeenCalledWith({
+ id: expect.stringContaining(mockCheckedRunnerIds[0]),
+ });
+ expect(evict).toHaveBeenCalledWith({
+ id: expect.stringContaining(mockCheckedRunnerIds[1]),
+ });
+
+ expect(gc).toHaveBeenCalledTimes(1);
+ });
+
+ it('modal is hidden', () => {
+ expect(mockHideModal).toHaveBeenCalledTimes(1);
+ });
+ });
+
+ describe('when deletion fails', () => {
+ beforeEach(() => {
+ bulkRunnerDeleteHandler.mockRejectedValue(new Error('error!'));
+
+ evt = {
+ preventDefault: jest.fn(),
+ };
+ findModal().vm.$emit('primary', evt);
+ });
+
+ it('has loading state', async () => {
+ expect(findModal().props('actionPrimary').attributes.loading).toBe(true);
+ expect(findModal().props('actionCancel').attributes.loading).toBe(true);
+
+ await waitForPromises();
+
+ expect(findModal().props('actionPrimary').attributes.loading).toBe(false);
+ expect(findModal().props('actionCancel').attributes.loading).toBe(false);
+ });
+
+ it('modal is not prevented from closing', () => {
+ expect(evt.preventDefault).toHaveBeenCalledTimes(1);
+ });
+
+ it('mutation is called', () => {
+ expect(bulkRunnerDeleteHandler).toHaveBeenCalledWith({
+ input: { ids: mockCheckedRunnerIds },
+ });
+ });
+
+ it('user interface is not updated', async () => {
+ await waitForPromises();
- findDeleteBtn().vm.$emit('click');
+ const { evict, gc } = apolloCache;
- expect(confirmAction).toHaveBeenCalledTimes(1);
+ expect(evict).not.toHaveBeenCalled();
+ expect(gc).not.toHaveBeenCalled();
+ expect(mockState.localMutations.clearChecked).not.toHaveBeenCalled();
+ });
- const [, confirmOptions] = confirmAction.mock.calls[0];
- const { title, modalHtmlMessage, primaryBtnText } = confirmOptions;
+ it('alert is called', async () => {
+ await waitForPromises();
- expect(title).toMatch(text);
- expect(primaryBtnText).toMatch(text);
- expect(modalHtmlMessage).toMatch(`<strong>${count}</strong>`);
+ expect(createAlert).toHaveBeenCalled();
+ expect(createAlert).toHaveBeenCalledWith({
+ message: expect.any(String),
+ captureError: true,
+ error: expect.any(Error),
+ });
+ });
});
});
});
diff --git a/spec/frontend/runner/components/runner_filtered_search_bar_spec.js b/spec/frontend/runner/components/runner_filtered_search_bar_spec.js
index 83fb1764c6d..e35bec3aa38 100644
--- a/spec/frontend/runner/components/runner_filtered_search_bar_spec.js
+++ b/spec/frontend/runner/components/runner_filtered_search_bar_spec.js
@@ -143,7 +143,7 @@ describe('RunnerList', () => {
runnerType: INSTANCE_TYPE,
filters: mockFilters,
sort: mockOtherSort,
- pagination: { page: 1 },
+ pagination: {},
});
});
});
@@ -156,7 +156,7 @@ describe('RunnerList', () => {
runnerType: null,
filters: mockFilters,
sort: mockDefaultSort,
- pagination: { page: 1 },
+ pagination: {},
});
});
@@ -167,7 +167,7 @@ describe('RunnerList', () => {
runnerType: null,
filters: [],
sort: mockOtherSort,
- pagination: { page: 1 },
+ pagination: {},
});
});
});
diff --git a/spec/frontend/runner/components/runner_jobs_spec.js b/spec/frontend/runner/components/runner_jobs_spec.js
index 20582aaaf40..4d38afb25ee 100644
--- a/spec/frontend/runner/components/runner_jobs_spec.js
+++ b/spec/frontend/runner/components/runner_jobs_spec.js
@@ -73,8 +73,7 @@ describe('RunnerJobs', () => {
it('Shows jobs', () => {
const jobs = findRunnerJobsTable().props('jobs');
- expect(jobs).toHaveLength(mockJobs.length);
- expect(jobs[0]).toMatchObject(mockJobs[0]);
+ expect(jobs).toEqual(mockJobs);
});
describe('When "Next" page is clicked', () => {
diff --git a/spec/frontend/runner/components/runner_list_spec.js b/spec/frontend/runner/components/runner_list_spec.js
index eca4bbc3490..7b58a81bb0d 100644
--- a/spec/frontend/runner/components/runner_list_spec.js
+++ b/spec/frontend/runner/components/runner_list_spec.js
@@ -88,9 +88,7 @@ describe('RunnerList', () => {
createComponent({}, mountExtended);
// Badges
- expect(findCell({ fieldKey: 'status' }).text()).toMatchInterpolatedText(
- 'never contacted paused',
- );
+ expect(findCell({ fieldKey: 'status' }).text()).toMatchInterpolatedText('never contacted');
// Runner summary
expect(findCell({ fieldKey: 'summary' }).text()).toContain(
@@ -124,10 +122,10 @@ describe('RunnerList', () => {
expect(findCell({ fieldKey: 'checkbox' }).find('input').exists()).toBe(true);
});
- it('Emits a checked event', () => {
+ it('Emits a checked event', async () => {
const checkbox = findCell({ fieldKey: 'checkbox' }).find('input');
- checkbox.setChecked();
+ await checkbox.setChecked();
expect(wrapper.emitted('checked')).toHaveLength(1);
expect(wrapper.emitted('checked')[0][0]).toEqual({
diff --git a/spec/frontend/runner/components/runner_pagination_spec.js b/spec/frontend/runner/components/runner_pagination_spec.js
index e144b52ceb3..499cc59250d 100644
--- a/spec/frontend/runner/components/runner_pagination_spec.js
+++ b/spec/frontend/runner/components/runner_pagination_spec.js
@@ -1,5 +1,5 @@
-import { GlPagination } from '@gitlab/ui';
-import { mount } from '@vue/test-utils';
+import { GlKeysetPagination } from '@gitlab/ui';
+import { shallowMount } from '@vue/test-utils';
import RunnerPagination from '~/runner/components/runner_pagination.vue';
const mockStartCursor = 'START_CURSOR';
@@ -8,21 +8,11 @@ const mockEndCursor = 'END_CURSOR';
describe('RunnerPagination', () => {
let wrapper;
- const findPagination = () => wrapper.findComponent(GlPagination);
+ const findPagination = () => wrapper.findComponent(GlKeysetPagination);
- const createComponent = ({ page = 1, hasPreviousPage = false, hasNextPage = true } = {}) => {
- wrapper = mount(RunnerPagination, {
- propsData: {
- value: {
- page,
- },
- pageInfo: {
- hasPreviousPage,
- hasNextPage,
- startCursor: mockStartCursor,
- endCursor: mockEndCursor,
- },
- },
+ const createComponent = (propsData = {}) => {
+ wrapper = shallowMount(RunnerPagination, {
+ propsData,
});
};
@@ -30,114 +20,96 @@ describe('RunnerPagination', () => {
wrapper.destroy();
});
- describe('When on the first page', () => {
- beforeEach(() => {
- createComponent({
- page: 1,
- hasPreviousPage: false,
- hasNextPage: true,
- });
- });
-
- it('Contains the current page information', () => {
- expect(findPagination().props('value')).toBe(1);
- expect(findPagination().props('prevPage')).toBe(null);
- expect(findPagination().props('nextPage')).toBe(2);
- });
-
- it('Goes to the second page', () => {
- findPagination().vm.$emit('input', 2);
-
- expect(wrapper.emitted('input')[0]).toEqual([
- {
- after: mockEndCursor,
- page: 2,
- },
- ]);
- });
- });
-
describe('When in between pages', () => {
+ const mockPageInfo = {
+ startCursor: mockStartCursor,
+ endCursor: mockEndCursor,
+ hasPreviousPage: true,
+ hasNextPage: true,
+ };
+
beforeEach(() => {
createComponent({
- page: 2,
- hasPreviousPage: true,
- hasNextPage: true,
+ pageInfo: mockPageInfo,
});
});
it('Contains the current page information', () => {
- expect(findPagination().props('value')).toBe(2);
- expect(findPagination().props('prevPage')).toBe(1);
- expect(findPagination().props('nextPage')).toBe(3);
+ expect(findPagination().props()).toMatchObject(mockPageInfo);
});
- it('Shows the next and previous pages', () => {
- const links = findPagination().findAll('a');
-
- expect(links).toHaveLength(2);
- expect(links.at(0).text()).toBe('Previous');
- expect(links.at(1).text()).toBe('Next');
- });
-
- it('Goes to the last page', () => {
- findPagination().vm.$emit('input', 3);
+ it('Goes to the prev page', () => {
+ findPagination().vm.$emit('prev');
expect(wrapper.emitted('input')[0]).toEqual([
{
- after: mockEndCursor,
- page: 3,
+ before: mockStartCursor,
},
]);
});
- it('Goes to the first page', () => {
- findPagination().vm.$emit('input', 1);
+ it('Goes to the next page', () => {
+ findPagination().vm.$emit('next');
expect(wrapper.emitted('input')[0]).toEqual([
{
- page: 1,
+ after: mockEndCursor,
},
]);
});
});
- describe('When in the last page', () => {
+ describe.each`
+ page | hasPreviousPage | hasNextPage
+ ${'first'} | ${false} | ${true}
+ ${'last'} | ${true} | ${false}
+ `('When on the $page page', ({ page, hasPreviousPage, hasNextPage }) => {
+ const mockPageInfo = {
+ startCursor: mockStartCursor,
+ endCursor: mockEndCursor,
+ hasPreviousPage,
+ hasNextPage,
+ };
+
beforeEach(() => {
createComponent({
- page: 3,
- hasPreviousPage: true,
- hasNextPage: false,
+ pageInfo: mockPageInfo,
});
});
- it('Contains the current page', () => {
- expect(findPagination().props('value')).toBe(3);
- expect(findPagination().props('prevPage')).toBe(2);
- expect(findPagination().props('nextPage')).toBe(null);
+ it(`Contains the ${page} page information`, () => {
+ expect(findPagination().props()).toMatchObject(mockPageInfo);
});
});
- describe('When only one page', () => {
+ describe('When no other pages', () => {
beforeEach(() => {
createComponent({
- page: 1,
- hasPreviousPage: false,
- hasNextPage: false,
+ pageInfo: {
+ hasPreviousPage: false,
+ hasNextPage: false,
+ },
});
});
- it('does not display pagination', () => {
- expect(wrapper.html()).toBe('');
+ it('is not shown', () => {
+ expect(findPagination().exists()).toBe(false);
});
+ });
- it('Contains the current page', () => {
- expect(findPagination().props('value')).toBe(1);
+ describe('When adding more attributes', () => {
+ beforeEach(() => {
+ createComponent({
+ pageInfo: {
+ hasPreviousPage: true,
+ hasNextPage: false,
+ },
+ disabled: true,
+ });
});
- it('Shows no more page buttons', () => {
- expect(findPagination().props('prevPage')).toBe(null);
- expect(findPagination().props('nextPage')).toBe(null);
+ it('attributes are passed', () => {
+ expect(findPagination().props('disabled')).toBe(true);
});
});
});
diff --git a/spec/frontend/runner/components/runner_projects_spec.js b/spec/frontend/runner/components/runner_projects_spec.js
index 6932b3b5197..c988fb8477d 100644
--- a/spec/frontend/runner/components/runner_projects_spec.js
+++ b/spec/frontend/runner/components/runner_projects_spec.js
@@ -95,6 +95,7 @@ describe('RunnerProjects', () => {
name,
fullName: nameWithNamespace,
avatarUrl,
+ isOwner: true, // first project is always owner
});
});
diff --git a/spec/frontend/runner/components/stat/runner_count_spec.js b/spec/frontend/runner/components/stat/runner_count_spec.js
index 89b51b1b4a7..2a6a745099f 100644
--- a/spec/frontend/runner/components/stat/runner_count_spec.js
+++ b/spec/frontend/runner/components/stat/runner_count_spec.js
@@ -7,8 +7,8 @@ import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
import { captureException } from '~/runner/sentry_utils';
-import allRunnersCountQuery from '~/runner/graphql/list/all_runners_count.query.graphql';
-import groupRunnersCountQuery from '~/runner/graphql/list/group_runners_count.query.graphql';
+import allRunnersCountQuery from 'ee_else_ce/runner/graphql/list/all_runners_count.query.graphql';
+import groupRunnersCountQuery from 'ee_else_ce/runner/graphql/list/group_runners_count.query.graphql';
import { runnersCountData, groupRunnersCountData } from '../../mock_data';
diff --git a/spec/frontend/runner/components/stat/runner_single_stat_spec.js b/spec/frontend/runner/components/stat/runner_single_stat_spec.js
new file mode 100644
index 00000000000..964a6a6ff71
--- /dev/null
+++ b/spec/frontend/runner/components/stat/runner_single_stat_spec.js
@@ -0,0 +1,61 @@
+import { GlSingleStat } from '@gitlab/ui/dist/charts';
+import { shallowMount } from '@vue/test-utils';
+import RunnerSingleStat from '~/runner/components/stat/runner_single_stat.vue';
+import RunnerCount from '~/runner/components/stat/runner_count.vue';
+import { INSTANCE_TYPE, GROUP_TYPE } from '~/runner/constants';
+
+describe('RunnerSingleStat', () => {
+ let wrapper;
+
+ const findRunnerCount = () => wrapper.findComponent(RunnerCount);
+ const findGlSingleStat = () => wrapper.findComponent(GlSingleStat);
+
+ const createComponent = ({ props = {}, count, mountFn = shallowMount, ...options } = {}) => {
+ wrapper = mountFn(RunnerSingleStat, {
+ propsData: {
+ scope: INSTANCE_TYPE,
+ title: 'My title',
+ variables: {},
+ ...props,
+ },
+ stubs: {
+ RunnerCount: {
+ props: ['scope', 'variables', 'skip'],
+ render() {
+ return this.$scopedSlots.default({
+ count,
+ });
+ },
+ },
+ },
+ ...options,
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it.each`
+ case | count | value
+ ${'number'} | ${99} | ${'99'}
+ ${'long number'} | ${1000} | ${'1,000'}
+ ${'empty number'} | ${null} | ${'-'}
+ `('formats $case', ({ count, value }) => {
+ createComponent({ count });
+
+ expect(findGlSingleStat().props('value')).toBe(value);
+ });
+
+ it('Passes runner count props', () => {
+ const props = {
+ scope: GROUP_TYPE,
+ variables: { paused: true },
+ skip: true,
+ };
+
+ createComponent({ props });
+
+ expect(findRunnerCount().props()).toEqual(props);
+ });
+});
diff --git a/spec/frontend/runner/components/stat/runner_stats_spec.js b/spec/frontend/runner/components/stat/runner_stats_spec.js
index f1ba6403dfb..7f1f22be94f 100644
--- a/spec/frontend/runner/components/stat/runner_stats_spec.js
+++ b/spec/frontend/runner/components/stat/runner_stats_spec.js
@@ -1,15 +1,13 @@
import { shallowMount, mount } from '@vue/test-utils';
import { s__ } from '~/locale';
import RunnerStats from '~/runner/components/stat/runner_stats.vue';
-import RunnerCount from '~/runner/components/stat/runner_count.vue';
-import RunnerStatusStat from '~/runner/components/stat/runner_status_stat.vue';
+import RunnerSingleStat from '~/runner/components/stat/runner_single_stat.vue';
import { INSTANCE_TYPE, STATUS_ONLINE, STATUS_OFFLINE, STATUS_STALE } from '~/runner/constants';
describe('RunnerStats', () => {
let wrapper;
- const findRunnerCountAt = (i) => wrapper.findAllComponents(RunnerCount).at(i);
- const findRunnerStatusStatAt = (i) => wrapper.findAllComponents(RunnerStatusStat).at(i);
+ const findSingleStats = () => wrapper.findAllComponents(RunnerSingleStat).wrappers;
const createComponent = ({ props = {}, mountFn = shallowMount, ...options } = {}) => {
wrapper = mountFn(RunnerStats, {
@@ -53,31 +51,12 @@ describe('RunnerStats', () => {
expect(text).toMatch(`${s__('Runners|Stale runners')} 1`);
});
- it('Displays counts for filtered searches', () => {
- createComponent({ props: { variables: { paused: true } } });
+ it('Displays all counts for filtered searches', () => {
+ const mockVariables = { paused: true };
+ createComponent({ props: { variables: mockVariables } });
- expect(findRunnerCountAt(0).props('variables').paused).toBe(true);
- expect(findRunnerCountAt(1).props('variables').paused).toBe(true);
- expect(findRunnerCountAt(2).props('variables').paused).toBe(true);
- });
-
- it('Skips overlapping statuses', () => {
- createComponent({ props: { variables: { status: STATUS_ONLINE } } });
-
- expect(findRunnerCountAt(0).props('skip')).toBe(false);
- expect(findRunnerCountAt(1).props('skip')).toBe(true);
- expect(findRunnerCountAt(2).props('skip')).toBe(true);
- });
-
- it.each`
- i | status
- ${0} | ${STATUS_ONLINE}
- ${1} | ${STATUS_OFFLINE}
- ${2} | ${STATUS_STALE}
- `('Displays status $status at index $i', ({ i, status }) => {
- createComponent({ mountFn: mount });
-
- expect(findRunnerCountAt(i).props('variables').status).toBe(status);
- expect(findRunnerStatusStatAt(i).props('status')).toBe(status);
+ findSingleStats().forEach((stat) => {
+ expect(stat.props('variables')).toMatchObject(mockVariables);
+ });
});
});
diff --git a/spec/frontend/runner/components/stat/runner_status_stat_spec.js b/spec/frontend/runner/components/stat/runner_status_stat_spec.js
deleted file mode 100644
index 3218272eac7..00000000000
--- a/spec/frontend/runner/components/stat/runner_status_stat_spec.js
+++ /dev/null
@@ -1,67 +0,0 @@
-import { GlSingleStat } from '@gitlab/ui/dist/charts';
-import { shallowMount, mount } from '@vue/test-utils';
-import RunnerStatusStat from '~/runner/components/stat/runner_status_stat.vue';
-import { STATUS_ONLINE, STATUS_OFFLINE, STATUS_STALE } from '~/runner/constants';
-
-describe('RunnerStatusStat', () => {
- let wrapper;
-
- const findSingleStat = () => wrapper.findComponent(GlSingleStat);
-
- const createComponent = ({ props = {} } = {}, mountFn = shallowMount) => {
- wrapper = mountFn(RunnerStatusStat, {
- propsData: {
- status: STATUS_ONLINE,
- value: 99,
- ...props,
- },
- });
- };
-
- afterEach(() => {
- wrapper.destroy();
- });
-
- describe.each`
- status | variant | title | badge
- ${STATUS_ONLINE} | ${'success'} | ${'Online runners'} | ${'online'}
- ${STATUS_OFFLINE} | ${'muted'} | ${'Offline runners'} | ${'offline'}
- ${STATUS_STALE} | ${'warning'} | ${'Stale runners'} | ${'stale'}
- `('Renders a stat for status "$status"', ({ status, variant, title, badge }) => {
- beforeEach(() => {
- createComponent({ props: { status } }, mount);
- });
-
- it('Renders text', () => {
- expect(wrapper.text()).toMatch(new RegExp(`${title} 99\\s+${badge}`));
- });
-
- it(`Uses variant ${variant}`, () => {
- expect(findSingleStat().props('variant')).toBe(variant);
- });
- });
-
- it('Formats stat number', () => {
- createComponent({ props: { value: 1000 } }, mount);
-
- expect(wrapper.text()).toMatch('Online runners 1,000');
- });
-
- it('Shows a null result', () => {
- createComponent({ props: { value: null } }, mount);
-
- expect(wrapper.text()).toMatch('Online runners -');
- });
-
- it('Shows an undefined result', () => {
- createComponent({ props: { value: undefined } }, mount);
-
- expect(wrapper.text()).toMatch('Online runners -');
- });
-
- it('Shows result for an unknown status', () => {
- createComponent({ props: { status: 'UNKNOWN' } }, mount);
-
- expect(wrapper.text()).toMatch('Runners 99');
- });
-});
diff --git a/spec/frontend/runner/graphql/local_state_spec.js b/spec/frontend/runner/graphql/local_state_spec.js
index 5c4302e4aa2..ae874fef00d 100644
--- a/spec/frontend/runner/graphql/local_state_spec.js
+++ b/spec/frontend/runner/graphql/local_state_spec.js
@@ -1,6 +1,8 @@
+import { gql } from '@apollo/client/core';
import createApolloClient from '~/lib/graphql';
import { createLocalState } from '~/runner/graphql/list/local_state';
import getCheckedRunnerIdsQuery from '~/runner/graphql/list/checked_runner_ids.query.graphql';
+import { RUNNER_TYPENAME } from '~/runner/constants';
describe('~/runner/graphql/list/local_state', () => {
let localState;
@@ -18,6 +20,21 @@ describe('~/runner/graphql/list/local_state', () => {
apolloClient = createApolloClient({}, { cacheConfig, typeDefs });
};
+ const addMockRunnerToCache = (id) => {
+ // mock some runners in the cache to prevent dangling references
+ apolloClient.writeFragment({
+ id: `${RUNNER_TYPENAME}:${id}`,
+ fragment: gql`
+ fragment DummyRunner on CiRunner {
+ __typename
+ }
+ `,
+ data: {
+ __typename: RUNNER_TYPENAME,
+ },
+ });
+ };
+
const queryCheckedRunnerIds = () => {
const { checkedRunnerIds } = apolloClient.readQuery({
query: getCheckedRunnerIdsQuery,
@@ -34,10 +51,25 @@ describe('~/runner/graphql/list/local_state', () => {
apolloClient = null;
});
- describe('default', () => {
- it('has empty checked list', () => {
+ describe('queryCheckedRunnerIds', () => {
+ it('has empty checked list by default', () => {
expect(queryCheckedRunnerIds()).toEqual([]);
});
+
+ it('returns checked runners that have a reference in the cache', () => {
+ addMockRunnerToCache('a');
+ localState.localMutations.setRunnerChecked({ runner: { id: 'a' }, isChecked: true });
+
+ expect(queryCheckedRunnerIds()).toEqual(['a']);
+ });
+
+ it('returns checked runners that are not dangling references', () => {
+ addMockRunnerToCache('a'); // 'b' is missing from the cache, perhaps because it was deleted
+ localState.localMutations.setRunnerChecked({ runner: { id: 'a' }, isChecked: true });
+ localState.localMutations.setRunnerChecked({ runner: { id: 'b' }, isChecked: true });
+
+ expect(queryCheckedRunnerIds()).toEqual(['a']);
+ });
});
describe.each`
@@ -48,6 +80,7 @@ describe('~/runner/graphql/list/local_state', () => {
`('setRunnerChecked', ({ inputs, expected }) => {
beforeEach(() => {
inputs.forEach(([id, isChecked]) => {
+ addMockRunnerToCache(id);
localState.localMutations.setRunnerChecked({ runner: { id }, isChecked });
});
});
@@ -56,9 +89,34 @@ describe('~/runner/graphql/list/local_state', () => {
});
});
+ describe.each`
+ inputs | expected
+ ${[[['a', 'b'], true]]} | ${['a', 'b']}
+ ${[[['a', 'b'], false]]} | ${[]}
+ ${[[['a', 'b'], true], [['c', 'd'], true]]} | ${['a', 'b', 'c', 'd']}
+ ${[[['a', 'b'], true], [['a', 'b'], false]]} | ${[]}
+ ${[[['a', 'b'], true], [['b'], false]]} | ${['a']}
+ `('setRunnersChecked', ({ inputs, expected }) => {
+ beforeEach(() => {
+ inputs.forEach(([ids, isChecked]) => {
+ ids.forEach(addMockRunnerToCache);
+
+ localState.localMutations.setRunnersChecked({
+ runners: ids.map((id) => ({ id })),
+ isChecked,
+ });
+ });
+ });
+
+ it(`for inputs="${inputs}" has ids="[${expected}]"`, () => {
+ expect(queryCheckedRunnerIds()).toEqual(expected);
+ });
+ });
+
describe('clearChecked', () => {
it('clears all checked items', () => {
['a', 'b', 'c'].forEach((id) => {
+ addMockRunnerToCache(id);
localState.localMutations.setRunnerChecked({ runner: { id }, isChecked: true });
});
diff --git a/spec/frontend/runner/group_runner_show/group_runner_show_app_spec.js b/spec/frontend/runner/group_runner_show/group_runner_show_app_spec.js
index 2065874c288..cee1d436942 100644
--- a/spec/frontend/runner/group_runner_show/group_runner_show_app_spec.js
+++ b/spec/frontend/runner/group_runner_show/group_runner_show_app_spec.js
@@ -92,10 +92,10 @@ describe('GroupRunnerShowApp', () => {
});
it('shows basic runner details', () => {
- const expected = `Description Instance runner
+ const expected = `Description My Runner
Last contact Never contacted
Version 1.0.0
- IP Address 127.0.0.1
+ IP Address None
Executor None
Architecture None
Platform darwin
@@ -178,13 +178,10 @@ describe('GroupRunnerShowApp', () => {
});
describe('When loading', () => {
- beforeEach(() => {
+ it('does not show runner details', () => {
mockRunnerQueryResult();
createComponent();
- });
-
- it('does not show runner details', () => {
expect(findRunnerDetails().exists()).toBe(false);
});
});
diff --git a/spec/frontend/runner/group_runners/group_runners_app_spec.js b/spec/frontend/runner/group_runners/group_runners_app_spec.js
index 9c42b0d6865..57d64202219 100644
--- a/spec/frontend/runner/group_runners/group_runners_app_spec.js
+++ b/spec/frontend/runner/group_runners/group_runners_app_spec.js
@@ -13,13 +13,13 @@ import { createAlert } from '~/flash';
import { s__ } from '~/locale';
import { getIdFromGraphQLId } from '~/graphql_shared/utils';
import { updateHistory } from '~/lib/utils/url_utility';
+import { upgradeStatusTokenConfig } from 'ee_else_ce/runner/components/search_tokens/upgrade_status_token_config';
import RunnerTypeTabs from '~/runner/components/runner_type_tabs.vue';
import RunnerFilteredSearchBar from '~/runner/components/runner_filtered_search_bar.vue';
import RunnerList from '~/runner/components/runner_list.vue';
import RunnerListEmptyState from '~/runner/components/runner_list_empty_state.vue';
import RunnerStats from '~/runner/components/stat/runner_stats.vue';
-import RunnerCount from '~/runner/components/stat/runner_count.vue';
import RunnerActionsCell from '~/runner/components/cells/runner_actions_cell.vue';
import RegistrationDropdown from '~/runner/components/registration/registration_dropdown.vue';
import RunnerPagination from '~/runner/components/runner_pagination.vue';
@@ -32,15 +32,14 @@ import {
GROUP_TYPE,
PARAM_KEY_PAUSED,
PARAM_KEY_STATUS,
- PARAM_KEY_TAG,
STATUS_ONLINE,
STATUS_OFFLINE,
STATUS_STALE,
RUNNER_PAGE_SIZE,
I18N_EDIT,
} from '~/runner/constants';
-import groupRunnersQuery from '~/runner/graphql/list/group_runners.query.graphql';
-import groupRunnersCountQuery from '~/runner/graphql/list/group_runners_count.query.graphql';
+import groupRunnersQuery from 'ee_else_ce/runner/graphql/list/group_runners.query.graphql';
+import groupRunnersCountQuery from 'ee_else_ce/runner/graphql/list/group_runners_count.query.graphql';
import GroupRunnersApp from '~/runner/group_runners/group_runners_app.vue';
import { captureException } from '~/runner/sentry_utils';
import {
@@ -49,6 +48,7 @@ import {
groupRunnersCountData,
onlineContactTimeoutSecs,
staleTimeoutSecs,
+ emptyPageInfo,
emptyStateSvgPath,
emptyStateFilteredSvgPath,
} from '../mock_data';
@@ -82,7 +82,7 @@ describe('GroupRunnersApp', () => {
const findRunnerListEmptyState = () => wrapper.findComponent(RunnerListEmptyState);
const findRunnerRow = (id) => extendedWrapper(wrapper.findByTestId(`runner-row-${id}`));
const findRunnerPagination = () => extendedWrapper(wrapper.findComponent(RunnerPagination));
- const findRunnerPaginationNext = () => findRunnerPagination().findByLabelText('Go to next page');
+ const findRunnerPaginationNext = () => findRunnerPagination().findByText(s__('Pagination|Next'));
const findRunnerFilteredSearchBar = () => wrapper.findComponent(RunnerFilteredSearchBar);
const createComponent = ({ props = {}, mountFn = shallowMountExtended, ...options } = {}) => {
@@ -111,7 +111,7 @@ describe('GroupRunnersApp', () => {
return waitForPromises();
};
- beforeEach(async () => {
+ beforeEach(() => {
mockGroupRunnersHandler.mockResolvedValue(groupRunnersData);
mockGroupRunnersCountHandler.mockResolvedValue(groupRunnersCountData);
});
@@ -197,6 +197,7 @@ describe('GroupRunnersApp', () => {
type: PARAM_KEY_STATUS,
options: expect.any(Array),
}),
+ upgradeStatusTokenConfig,
]);
});
@@ -254,12 +255,7 @@ describe('GroupRunnersApp', () => {
beforeEach(async () => {
setWindowLocation(`?status[]=${STATUS_ONLINE}&runner_type[]=${INSTANCE_TYPE}`);
- await createComponent({
- stubs: {
- RunnerStats,
- RunnerCount,
- },
- });
+ await createComponent({ mountFn: mountExtended });
});
it('sets the filters in the search bar', () => {
@@ -267,7 +263,7 @@ describe('GroupRunnersApp', () => {
runnerType: INSTANCE_TYPE,
filters: [{ type: 'status', value: { data: STATUS_ONLINE, operator: '=' } }],
sort: 'CREATED_DESC',
- pagination: { page: 1 },
+ pagination: {},
});
});
@@ -292,19 +288,11 @@ describe('GroupRunnersApp', () => {
describe('when a filter is selected by the user', () => {
beforeEach(async () => {
- createComponent({
- stubs: {
- RunnerStats,
- RunnerCount,
- },
- });
+ await createComponent({ mountFn: mountExtended });
findRunnerFilteredSearchBar().vm.$emit('input', {
runnerType: null,
- filters: [
- { type: PARAM_KEY_STATUS, value: { data: STATUS_ONLINE, operator: '=' } },
- { type: PARAM_KEY_TAG, value: { data: 'tag1', operator: '=' } },
- ],
+ filters: [{ type: PARAM_KEY_STATUS, value: { data: STATUS_ONLINE, operator: '=' } }],
sort: CREATED_ASC,
});
@@ -314,7 +302,7 @@ describe('GroupRunnersApp', () => {
it('updates the browser url', () => {
expect(updateHistory).toHaveBeenLastCalledWith({
title: expect.any(String),
- url: expect.stringContaining('?status[]=ONLINE&tag[]=tag1&sort=CREATED_ASC'),
+ url: expect.stringContaining('?status[]=ONLINE&sort=CREATED_ASC'),
});
});
@@ -322,7 +310,6 @@ describe('GroupRunnersApp', () => {
expect(mockGroupRunnersHandler).toHaveBeenLastCalledWith({
groupFullPath: mockGroupFullPath,
status: STATUS_ONLINE,
- tagList: ['tag1'],
sort: CREATED_ASC,
first: RUNNER_PAGE_SIZE,
});
@@ -331,7 +318,6 @@ describe('GroupRunnersApp', () => {
it('fetches count results for requested status', () => {
expect(mockGroupRunnersCountHandler).toHaveBeenCalledWith({
groupFullPath: mockGroupFullPath,
- tagList: ['tag1'],
status: STATUS_ONLINE,
});
});
@@ -340,6 +326,7 @@ describe('GroupRunnersApp', () => {
it('when runners have not loaded, shows a loading state', () => {
createComponent();
expect(findRunnerList().props('loading')).toBe(true);
+ expect(findRunnerPagination().attributes('disabled')).toBe('true');
});
describe('when no runners are found', () => {
@@ -348,13 +335,20 @@ describe('GroupRunnersApp', () => {
data: {
group: {
id: '1',
- runners: { nodes: [] },
+ runners: {
+ edges: [],
+ pageInfo: emptyPageInfo,
+ },
},
},
});
await createComponent();
});
+ it('shows no errors', () => {
+ expect(createAlert).not.toHaveBeenCalled();
+ });
+
it('shows an empty state', async () => {
expect(findRunnerListEmptyState().exists()).toBe(true);
});
@@ -379,12 +373,18 @@ describe('GroupRunnersApp', () => {
});
describe('Pagination', () => {
+ const { pageInfo } = groupRunnersDataPaginated.data.group.runners;
+
beforeEach(async () => {
mockGroupRunnersHandler.mockResolvedValue(groupRunnersDataPaginated);
await createComponent({ mountFn: mountExtended });
});
+ it('passes the page info', () => {
+ expect(findRunnerPagination().props('pageInfo')).toEqual(pageInfo);
+ });
+
it('navigates to the next page', async () => {
await findRunnerPaginationNext().trigger('click');
@@ -392,7 +392,7 @@ describe('GroupRunnersApp', () => {
groupFullPath: mockGroupFullPath,
sort: CREATED_DESC,
first: RUNNER_PAGE_SIZE,
- after: groupRunnersDataPaginated.data.group.runners.pageInfo.endCursor,
+ after: pageInfo.endCursor,
});
});
});
diff --git a/spec/frontend/runner/mock_data.js b/spec/frontend/runner/mock_data.js
index e5472ace817..555ec40184f 100644
--- a/spec/frontend/runner/mock_data.js
+++ b/spec/frontend/runner/mock_data.js
@@ -19,6 +19,14 @@ import groupRunnersCountData from 'test_fixtures/graphql/runner/list/group_runne
import { RUNNER_PAGE_SIZE } from '~/runner/constants';
+const emptyPageInfo = {
+ __typename: 'PageInfo',
+ hasNextPage: false,
+ hasPreviousPage: false,
+ startCursor: '',
+ endCursor: '',
+};
+
// Other mock data
// Mock searches and their corresponding urls
@@ -26,7 +34,7 @@ export const mockSearchExamples = [
{
name: 'a default query',
urlQuery: '',
- search: { runnerType: null, filters: [], pagination: { page: 1 }, sort: 'CREATED_DESC' },
+ search: { runnerType: null, filters: [], pagination: {}, sort: 'CREATED_DESC' },
graphqlVariables: { sort: 'CREATED_DESC', first: RUNNER_PAGE_SIZE },
isDefault: true,
},
@@ -36,7 +44,7 @@ export const mockSearchExamples = [
search: {
runnerType: null,
filters: [{ type: 'status', value: { data: 'ACTIVE', operator: '=' } }],
- pagination: { page: 1 },
+ pagination: {},
sort: 'CREATED_DESC',
},
graphqlVariables: { status: 'ACTIVE', sort: 'CREATED_DESC', first: RUNNER_PAGE_SIZE },
@@ -52,7 +60,7 @@ export const mockSearchExamples = [
value: { data: 'something' },
},
],
- pagination: { page: 1 },
+ pagination: {},
sort: 'CREATED_DESC',
},
graphqlVariables: { search: 'something', sort: 'CREATED_DESC', first: RUNNER_PAGE_SIZE },
@@ -72,7 +80,7 @@ export const mockSearchExamples = [
value: { data: 'else' },
},
],
- pagination: { page: 1 },
+ pagination: {},
sort: 'CREATED_DESC',
},
graphqlVariables: { search: 'something else', sort: 'CREATED_DESC', first: RUNNER_PAGE_SIZE },
@@ -83,7 +91,7 @@ export const mockSearchExamples = [
search: {
runnerType: 'INSTANCE_TYPE',
filters: [],
- pagination: { page: 1 },
+ pagination: {},
sort: 'CREATED_DESC',
},
graphqlVariables: { type: 'INSTANCE_TYPE', sort: 'CREATED_DESC', first: RUNNER_PAGE_SIZE },
@@ -97,7 +105,7 @@ export const mockSearchExamples = [
{ type: 'status', value: { data: 'ACTIVE', operator: '=' } },
{ type: 'status', value: { data: 'PAUSED', operator: '=' } },
],
- pagination: { page: 1 },
+ pagination: {},
sort: 'CREATED_DESC',
},
graphqlVariables: { status: 'ACTIVE', sort: 'CREATED_DESC', first: RUNNER_PAGE_SIZE },
@@ -108,7 +116,7 @@ export const mockSearchExamples = [
search: {
runnerType: 'INSTANCE_TYPE',
filters: [{ type: 'status', value: { data: 'ACTIVE', operator: '=' } }],
- pagination: { page: 1 },
+ pagination: {},
sort: 'CREATED_ASC',
},
graphqlVariables: {
@@ -124,7 +132,7 @@ export const mockSearchExamples = [
search: {
runnerType: null,
filters: [{ type: 'tag', value: { data: 'tag-1', operator: '=' } }],
- pagination: { page: 1 },
+ pagination: {},
sort: 'CREATED_DESC',
},
graphqlVariables: {
@@ -142,7 +150,7 @@ export const mockSearchExamples = [
{ type: 'tag', value: { data: 'tag-1', operator: '=' } },
{ type: 'tag', value: { data: 'tag-2', operator: '=' } },
],
- pagination: { page: 1 },
+ pagination: {},
sort: 'CREATED_DESC',
},
graphqlVariables: {
@@ -153,22 +161,22 @@ export const mockSearchExamples = [
},
{
name: 'the next page',
- urlQuery: '?page=2&after=AFTER_CURSOR',
+ urlQuery: '?after=AFTER_CURSOR',
search: {
runnerType: null,
filters: [],
- pagination: { page: 2, after: 'AFTER_CURSOR' },
+ pagination: { after: 'AFTER_CURSOR' },
sort: 'CREATED_DESC',
},
graphqlVariables: { sort: 'CREATED_DESC', after: 'AFTER_CURSOR', first: RUNNER_PAGE_SIZE },
},
{
name: 'the previous page',
- urlQuery: '?page=2&before=BEFORE_CURSOR',
+ urlQuery: '?before=BEFORE_CURSOR',
search: {
runnerType: null,
filters: [],
- pagination: { page: 2, before: 'BEFORE_CURSOR' },
+ pagination: { before: 'BEFORE_CURSOR' },
sort: 'CREATED_DESC',
},
graphqlVariables: { sort: 'CREATED_DESC', before: 'BEFORE_CURSOR', last: RUNNER_PAGE_SIZE },
@@ -176,7 +184,7 @@ export const mockSearchExamples = [
{
name: 'the next page filtered by a status, an instance type, tags and a non default sort',
urlQuery:
- '?status[]=ACTIVE&runner_type[]=INSTANCE_TYPE&tag[]=tag-1&tag[]=tag-2&sort=CREATED_ASC&page=2&after=AFTER_CURSOR',
+ '?status[]=ACTIVE&runner_type[]=INSTANCE_TYPE&tag[]=tag-1&tag[]=tag-2&sort=CREATED_ASC&after=AFTER_CURSOR',
search: {
runnerType: 'INSTANCE_TYPE',
filters: [
@@ -184,7 +192,7 @@ export const mockSearchExamples = [
{ type: 'tag', value: { data: 'tag-1', operator: '=' } },
{ type: 'tag', value: { data: 'tag-2', operator: '=' } },
],
- pagination: { page: 2, after: 'AFTER_CURSOR' },
+ pagination: { after: 'AFTER_CURSOR' },
sort: 'CREATED_ASC',
},
graphqlVariables: {
@@ -202,7 +210,7 @@ export const mockSearchExamples = [
search: {
runnerType: null,
filters: [{ type: 'paused', value: { data: 'true', operator: '=' } }],
- pagination: { page: 1 },
+ pagination: {},
sort: 'CREATED_DESC',
},
graphqlVariables: { paused: true, sort: 'CREATED_DESC', first: RUNNER_PAGE_SIZE },
@@ -213,7 +221,7 @@ export const mockSearchExamples = [
search: {
runnerType: null,
filters: [{ type: 'paused', value: { data: 'false', operator: '=' } }],
- pagination: { page: 1 },
+ pagination: {},
sort: 'CREATED_DESC',
},
graphqlVariables: { paused: false, sort: 'CREATED_DESC', first: RUNNER_PAGE_SIZE },
@@ -233,6 +241,7 @@ export {
groupRunnersData,
groupRunnersDataPaginated,
groupRunnersCountData,
+ emptyPageInfo,
runnerData,
runnerWithGroupData,
runnerProjectsData,
diff --git a/spec/frontend/runner/runner_search_utils_spec.js b/spec/frontend/runner/runner_search_utils_spec.js
index 6f954143ab1..e1f90482b34 100644
--- a/spec/frontend/runner/runner_search_utils_spec.js
+++ b/spec/frontend/runner/runner_search_utils_spec.js
@@ -24,11 +24,14 @@ describe('search_params.js', () => {
});
it.each`
- query | updatedQuery
- ${'status[]=ACTIVE'} | ${'paused[]=false'}
- ${'status[]=ACTIVE&a=b'} | ${'a=b&paused[]=false'}
- ${'status[]=ACTIVE'} | ${'paused[]=false'}
- ${'status[]=PAUSED'} | ${'paused[]=true'}
+ query | updatedQuery
+ ${'status[]=ACTIVE'} | ${'paused[]=false'}
+ ${'status[]=ACTIVE&a=b'} | ${'a=b&paused[]=false'}
+ ${'status[]=ACTIVE'} | ${'paused[]=false'}
+ ${'status[]=PAUSED'} | ${'paused[]=true'}
+ ${'page=2&after=AFTER'} | ${'after=AFTER'}
+ ${'page=2&before=BEFORE'} | ${'before=BEFORE'}
+ ${'status[]=PAUSED&page=2&after=AFTER'} | ${'after=AFTER&paused[]=true'}
`('updates "$query" to "$updatedQuery"', ({ query, updatedQuery }) => {
const mockUrl = 'http://test.host/admin/runners?';
@@ -49,24 +52,6 @@ describe('search_params.js', () => {
{ type: 'filtered-search-term', value: { data: 'text' } },
]);
});
-
- it('When a page cannot be parsed as a number, it defaults to `1`', () => {
- expect(fromUrlQueryToSearch('?page=NONSENSE&after=AFTER_CURSOR').pagination).toEqual({
- page: 1,
- });
- });
-
- it('When a page is less than 1, it defaults to `1`', () => {
- expect(fromUrlQueryToSearch('?page=0&after=AFTER_CURSOR').pagination).toEqual({
- page: 1,
- });
- });
-
- it('When a page with no cursor is given, it defaults to `1`', () => {
- expect(fromUrlQueryToSearch('?page=2').pagination).toEqual({
- page: 1,
- });
- });
});
describe('fromSearchToUrl', () => {
@@ -143,8 +128,11 @@ describe('search_params.js', () => {
});
});
- it('given a missing pagination, evaluates as not filtered', () => {
- expect(isSearchFiltered({ pagination: null })).toBe(false);
- });
+ it.each([null, undefined, {}])(
+ 'given a missing or empty pagination, evaluates as not filtered',
+ (mockPagination) => {
+ expect(isSearchFiltered({ pagination: mockPagination })).toBe(false);
+ },
+ );
});
});
diff --git a/spec/frontend/security_configuration/components/training_provider_list_spec.js b/spec/frontend/security_configuration/components/training_provider_list_spec.js
index 184c16fda6e..b6451af57d7 100644
--- a/spec/frontend/security_configuration/components/training_provider_list_spec.js
+++ b/spec/frontend/security_configuration/components/training_provider_list_spec.js
@@ -402,7 +402,7 @@ describe('TrainingProviderList component', () => {
it('has disabled state for radio', () => {
findPrimaryProviderRadios().wrappers.forEach((radio) => {
- expect(radio.attributes('disabled')).toBeTruthy();
+ expect(radio.attributes('disabled')).toBe('true');
});
});
diff --git a/spec/frontend/sidebar/assignees_spec.js b/spec/frontend/sidebar/assignees_spec.js
index a4474ead956..c2aff456abb 100644
--- a/spec/frontend/sidebar/assignees_spec.js
+++ b/spec/frontend/sidebar/assignees_spec.js
@@ -70,7 +70,7 @@ describe('Assignee component', () => {
wrapper.find('[data-testid="assign-yourself"]').trigger('click');
await nextTick();
- expect(wrapper.emitted('assign-self')).toBeTruthy();
+ expect(wrapper.emitted('assign-self')).toHaveLength(1);
});
});
diff --git a/spec/frontend/sidebar/components/assignees/sidebar_assignees_widget_spec.js b/spec/frontend/sidebar/components/assignees/sidebar_assignees_widget_spec.js
index 5fd364afbe4..88015ed42a3 100644
--- a/spec/frontend/sidebar/components/assignees/sidebar_assignees_widget_spec.js
+++ b/spec/frontend/sidebar/components/assignees/sidebar_assignees_widget_spec.js
@@ -148,6 +148,7 @@ describe('Sidebar assignees widget', () => {
expect(findAssignees().props('users')).toEqual([
{
+ __typename: 'UserCore',
id: 'gid://gitlab/User/2',
avatarUrl:
'https://www.gravatar.com/avatar/a95e5b71488f4b9d69ce5ff58bfd28d6?s=80\u0026d=identicon',
diff --git a/spec/frontend/sidebar/components/attention_requested_toggle_spec.js b/spec/frontend/sidebar/components/attention_requested_toggle_spec.js
deleted file mode 100644
index 58fa878a189..00000000000
--- a/spec/frontend/sidebar/components/attention_requested_toggle_spec.js
+++ /dev/null
@@ -1,121 +0,0 @@
-import { GlButton } from '@gitlab/ui';
-import { mount } from '@vue/test-utils';
-import AttentionRequestedToggle from '~/sidebar/components/attention_requested_toggle.vue';
-
-let wrapper;
-
-function factory(propsData = {}) {
- wrapper = mount(AttentionRequestedToggle, { propsData });
-}
-
-const findToggle = () => wrapper.findComponent(GlButton);
-
-describe('Attention require toggle', () => {
- afterEach(() => {
- wrapper.destroy();
- });
-
- it('renders button', () => {
- factory({
- type: 'reviewer',
- user: { attention_requested: false, can_update_merge_request: true },
- });
-
- expect(findToggle().exists()).toBe(true);
- });
-
- it.each`
- attentionRequested | icon
- ${true} | ${'attention-solid'}
- ${false} | ${'attention'}
- `(
- 'renders $icon icon when attention_requested is $attentionRequested',
- ({ attentionRequested, icon }) => {
- factory({
- type: 'reviewer',
- user: { attention_requested: attentionRequested, can_update_merge_request: true },
- });
-
- expect(findToggle().props('icon')).toBe(icon);
- },
- );
-
- it.each`
- attentionRequested | selected
- ${true} | ${true}
- ${false} | ${false}
- `(
- 'renders button with as selected when $selected when attention_requested is $attentionRequested',
- ({ attentionRequested, selected }) => {
- factory({
- type: 'reviewer',
- user: { attention_requested: attentionRequested, can_update_merge_request: true },
- });
-
- expect(findToggle().props('selected')).toBe(selected);
- },
- );
-
- it('emits toggle-attention-requested on click', async () => {
- factory({
- type: 'reviewer',
- user: { attention_requested: true, can_update_merge_request: true },
- });
-
- await findToggle().trigger('click');
-
- expect(wrapper.emitted('toggle-attention-requested')[0]).toEqual([
- {
- user: { attention_requested: true, can_update_merge_request: true },
- callback: expect.anything(),
- direction: 'remove',
- },
- ]);
- });
-
- it('does not emit toggle-attention-requested on click if can_update_merge_request is false', async () => {
- factory({
- type: 'reviewer',
- user: { attention_requested: true, can_update_merge_request: false },
- });
-
- await findToggle().trigger('click');
-
- expect(wrapper.emitted('toggle-attention-requested')).toBe(undefined);
- });
-
- it('sets loading on click', async () => {
- factory({
- type: 'reviewer',
- user: { attention_requested: true, can_update_merge_request: true },
- });
-
- await findToggle().trigger('click');
-
- expect(findToggle().props('loading')).toBe(true);
- });
-
- it.each`
- type | attentionRequested | tooltip | canUpdateMergeRequest
- ${'reviewer'} | ${true} | ${AttentionRequestedToggle.i18n.removeAttentionRequest} | ${true}
- ${'reviewer'} | ${false} | ${AttentionRequestedToggle.i18n.addAttentionRequest} | ${true}
- ${'assignee'} | ${false} | ${AttentionRequestedToggle.i18n.addAttentionRequest} | ${true}
- ${'reviewer'} | ${true} | ${AttentionRequestedToggle.i18n.attentionRequestedNoPermission} | ${false}
- ${'reviewer'} | ${false} | ${AttentionRequestedToggle.i18n.noAttentionRequestedNoPermission} | ${false}
- ${'assignee'} | ${true} | ${AttentionRequestedToggle.i18n.attentionRequestedNoPermission} | ${false}
- ${'assignee'} | ${false} | ${AttentionRequestedToggle.i18n.noAttentionRequestedNoPermission} | ${false}
- `(
- 'sets tooltip as $tooltip when attention_requested is $attentionRequested, type is $type and, can_update_merge_request is $canUpdateMergeRequest',
- ({ type, attentionRequested, tooltip, canUpdateMergeRequest }) => {
- factory({
- type,
- user: {
- attention_requested: attentionRequested,
- can_update_merge_request: canUpdateMergeRequest,
- },
- });
-
- expect(findToggle().attributes('aria-label')).toBe(tooltip);
- },
- );
-});
diff --git a/spec/frontend/sidebar/components/confidential/sidebar_confidentiality_form_spec.js b/spec/frontend/sidebar/components/confidential/sidebar_confidentiality_form_spec.js
index 1ea035c7184..7775ed6aa37 100644
--- a/spec/frontend/sidebar/components/confidential/sidebar_confidentiality_form_spec.js
+++ b/spec/frontend/sidebar/components/confidential/sidebar_confidentiality_form_spec.js
@@ -71,7 +71,12 @@ describe('Sidebar Confidentiality Form', () => {
it('creates a flash if mutation contains errors', async () => {
createComponent({
mutate: jest.fn().mockResolvedValue({
- data: { issuableSetConfidential: { errors: ['Houston, we have a problem!'] } },
+ data: {
+ issuableSetConfidential: {
+ issuable: { confidential: false },
+ errors: ['Houston, we have a problem!'],
+ },
+ },
}),
});
findConfidentialToggle().vm.$emit('click', new MouseEvent('click'));
@@ -82,6 +87,24 @@ describe('Sidebar Confidentiality Form', () => {
});
});
+ it('emits `closeForm` event with confidentiality value when mutation is successful', async () => {
+ createComponent({
+ mutate: jest.fn().mockResolvedValue({
+ data: {
+ issuableSetConfidential: {
+ issuable: { confidential: true },
+ errors: [],
+ },
+ },
+ }),
+ });
+
+ findConfidentialToggle().vm.$emit('click', new MouseEvent('click'));
+ await waitForPromises();
+
+ expect(wrapper.emitted('closeForm')).toEqual([[{ confidential: true }]]);
+ });
+
describe('when issue is not confidential', () => {
beforeEach(() => {
createComponent();
diff --git a/spec/frontend/sidebar/components/confidential/sidebar_confidentiality_widget_spec.js b/spec/frontend/sidebar/components/confidential/sidebar_confidentiality_widget_spec.js
index 1de71e52264..18ee423d12e 100644
--- a/spec/frontend/sidebar/components/confidential/sidebar_confidentiality_widget_spec.js
+++ b/spec/frontend/sidebar/components/confidential/sidebar_confidentiality_widget_spec.js
@@ -132,6 +132,7 @@ describe('Sidebar Confidentiality Widget', () => {
it('closes the form and dispatches an event when `closeForm` is emitted', async () => {
createComponent();
const el = wrapper.vm.$el;
+ const closeFormPayload = { confidential: true };
jest.spyOn(el, 'dispatchEvent');
await waitForPromises();
@@ -140,12 +141,12 @@ describe('Sidebar Confidentiality Widget', () => {
expect(findConfidentialityForm().isVisible()).toBe(true);
- findConfidentialityForm().vm.$emit('closeForm');
+ findConfidentialityForm().vm.$emit('closeForm', closeFormPayload);
await nextTick();
expect(findConfidentialityForm().isVisible()).toBe(false);
expect(el.dispatchEvent).toHaveBeenCalled();
- expect(wrapper.emitted('closeForm')).toHaveLength(1);
+ expect(wrapper.emitted('closeForm')).toEqual([[closeFormPayload]]);
});
it('emits `expandSidebar` event when it is emitted from child component', async () => {
diff --git a/spec/frontend/sidebar/components/incidents/escalation_status_spec.js b/spec/frontend/sidebar/components/incidents/escalation_status_spec.js
index 8d8c10d10f1..83764cb6739 100644
--- a/spec/frontend/sidebar/components/incidents/escalation_status_spec.js
+++ b/spec/frontend/sidebar/components/incidents/escalation_status_spec.js
@@ -1,4 +1,5 @@
import { GlDropdown, GlDropdownItem } from '@gitlab/ui';
+import { nextTick } from 'vue';
import waitForPromises from 'helpers/wait_for_promises';
import { mountExtended } from 'helpers/vue_test_utils_helper';
import EscalationStatus from '~/sidebar/components/incidents/escalation_status.vue';
@@ -61,6 +62,8 @@ describe('EscalationStatus', () => {
createComponent();
// Open dropdown
await toggleDropdown();
+ jest.runOnlyPendingTimers();
+ await nextTick();
expect(findDropdownMenu().classes('show')).toBe(true);
@@ -74,6 +77,8 @@ describe('EscalationStatus', () => {
createComponent({ preventDropdownClose: true });
// Open dropdown
await toggleDropdown();
+ jest.runOnlyPendingTimers();
+ await nextTick();
expect(findDropdownMenu().classes('show')).toBe(true);
diff --git a/spec/frontend/sidebar/components/participants/sidebar_participants_widget_spec.js b/spec/frontend/sidebar/components/participants/sidebar_participants_widget_spec.js
index 338ecf944f3..859e63b3df6 100644
--- a/spec/frontend/sidebar/components/participants/sidebar_participants_widget_spec.js
+++ b/spec/frontend/sidebar/components/participants/sidebar_participants_widget_spec.js
@@ -1,7 +1,6 @@
import { shallowMount } from '@vue/test-utils';
import Vue, { nextTick } from 'vue';
import VueApollo from 'vue-apollo';
-import { stripTypenames } from 'helpers/graphql_helpers';
import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
import Participants from '~/sidebar/components/participants/participants.vue';
@@ -67,11 +66,9 @@ describe('Sidebar Participants Widget', () => {
});
it('passes participants to child component', () => {
- const participantsWithoutTypename = stripTypenames(
+ expect(findParticipants().props('participants')).toEqual(
epicParticipantsResponse().data.workspace.issuable.participants.nodes,
);
-
- expect(findParticipants().props('participants')).toEqual(participantsWithoutTypename);
});
});
diff --git a/spec/frontend/sidebar/components/reviewers/uncollapsed_reviewer_list_spec.js b/spec/frontend/sidebar/components/reviewers/uncollapsed_reviewer_list_spec.js
index 8999f120a0f..2c24df2436a 100644
--- a/spec/frontend/sidebar/components/reviewers/uncollapsed_reviewer_list_spec.js
+++ b/spec/frontend/sidebar/components/reviewers/uncollapsed_reviewer_list_spec.js
@@ -1,9 +1,22 @@
import { shallowMount } from '@vue/test-utils';
import { TEST_HOST } from 'helpers/test_constants';
-import AttentionRequestedToggle from '~/sidebar/components/attention_requested_toggle.vue';
import ReviewerAvatarLink from '~/sidebar/components/reviewers/reviewer_avatar_link.vue';
import UncollapsedReviewerList from '~/sidebar/components/reviewers/uncollapsed_reviewer_list.vue';
-import userDataMock from '../../user_data_mock';
+
+const userDataMock = () => ({
+ id: 1,
+ name: 'Root',
+ state: 'active',
+ username: 'root',
+ webUrl: `${TEST_HOST}/root`,
+ avatarUrl: `${TEST_HOST}/avatar/root.png`,
+ mergeRequestInteraction: {
+ canMerge: true,
+ canUpdate: true,
+ reviewed: true,
+ approved: false,
+ },
+});
describe('UncollapsedReviewerList component', () => {
let wrapper;
@@ -70,7 +83,10 @@ describe('UncollapsedReviewerList component', () => {
id: 2,
name: 'nonrooty-nonrootersen',
username: 'hello-world',
- approved: true,
+ mergeRequestInteraction: {
+ ...user.mergeRequestInteraction,
+ approved: true,
+ },
};
beforeEach(() => {
@@ -119,18 +135,4 @@ describe('UncollapsedReviewerList component', () => {
expect(wrapper.find('[data-testid="re-request-success"]').exists()).toBe(true);
});
});
-
- it('hides re-request review button when attentionRequired feature flag is enabled', () => {
- createComponent({ users: [userDataMock()] }, { mrAttentionRequests: true });
-
- expect(wrapper.findAll('[data-testid="re-request-button"]').length).toBe(0);
- });
-
- it('emits toggle-attention-requested', () => {
- createComponent({ users: [userDataMock()] }, { mrAttentionRequests: true });
-
- wrapper.find(AttentionRequestedToggle).vm.$emit('toggle-attention-requested', 'data');
-
- expect(wrapper.emitted('toggle-attention-requested')[0]).toEqual(['data']);
- });
});
diff --git a/spec/frontend/sidebar/mock_data.js b/spec/frontend/sidebar/mock_data.js
index 229757ff40c..9c6e23e928c 100644
--- a/spec/frontend/sidebar/mock_data.js
+++ b/spec/frontend/sidebar/mock_data.js
@@ -343,6 +343,14 @@ export const issuableQueryResponse = {
__typename: 'Issue',
id: 'gid://gitlab/Issue/1',
iid: '1',
+ author: {
+ id: '1',
+ avatarUrl: '/avatar',
+ name: 'root',
+ username: 'root',
+ webUrl: 'root',
+ status: null,
+ },
assignees: {
nodes: [
{
@@ -450,7 +458,7 @@ export const subscriptionResponse = {
},
};
-const mockUser1 = {
+export const mockUser1 = {
__typename: 'UserCore',
id: 'gid://gitlab/User/1',
avatarUrl:
@@ -459,6 +467,7 @@ const mockUser1 = {
username: 'root',
webUrl: '/root',
status: null,
+ canMerge: false,
};
export const mockUser2 = {
@@ -469,6 +478,7 @@ export const mockUser2 = {
username: 'rookie',
webUrl: 'rookie',
status: null,
+ canMerge: false,
};
export const searchResponse = {
diff --git a/spec/frontend/sidebar/reviewer_title_spec.js b/spec/frontend/sidebar/reviewer_title_spec.js
index 3c250be5d5e..6b4eed5ad0f 100644
--- a/spec/frontend/sidebar/reviewer_title_spec.js
+++ b/spec/frontend/sidebar/reviewer_title_spec.js
@@ -47,7 +47,7 @@ describe('ReviewerTitle component', () => {
editable: false,
});
- expect(wrapper.find(GlLoadingIcon).exists()).toBeFalsy();
+ expect(wrapper.find(GlLoadingIcon).exists()).toBe(false);
});
it('renders spinner when loading', () => {
@@ -57,7 +57,7 @@ describe('ReviewerTitle component', () => {
editable: false,
});
- expect(wrapper.find(GlLoadingIcon).exists()).toBeTruthy();
+ expect(wrapper.find(GlLoadingIcon).exists()).toBe(true);
});
it('does not render edit link when not editable', () => {
diff --git a/spec/frontend/sidebar/reviewers_spec.js b/spec/frontend/sidebar/reviewers_spec.js
index 351dfc9a6ed..88bacc9b7f7 100644
--- a/spec/frontend/sidebar/reviewers_spec.js
+++ b/spec/frontend/sidebar/reviewers_spec.js
@@ -1,9 +1,23 @@
import { GlIcon } from '@gitlab/ui';
import { mount } from '@vue/test-utils';
import { trimText } from 'helpers/text_helper';
-import UsersMockHelper from 'helpers/user_mock_data_helper';
+import { TEST_HOST } from 'helpers/test_constants';
import Reviewer from '~/sidebar/components/reviewers/reviewers.vue';
-import UsersMock from './mock_data';
+
+const usersMock = (id = 1) => ({
+ id,
+ name: 'Root',
+ state: 'active',
+ username: 'root',
+ webUrl: `${TEST_HOST}/root`,
+ avatarUrl: `${TEST_HOST}/avatar/root.png`,
+ mergeRequestInteraction: {
+ canMerge: true,
+ canUpdate: true,
+ reviewed: true,
+ approved: false,
+ },
+});
describe('Reviewer component', () => {
const getDefaultProps = () => ({
@@ -42,23 +56,23 @@ describe('Reviewer component', () => {
it('displays one reviewer icon when collapsed', () => {
createWrapper({
...getDefaultProps(),
- users: [UsersMock.user],
+ users: [usersMock()],
});
const collapsedChildren = findCollapsedChildren();
const reviewer = collapsedChildren.at(0);
expect(collapsedChildren.length).toBe(1);
- expect(reviewer.find('.avatar').attributes('src')).toBe(UsersMock.user.avatar);
- expect(reviewer.find('.avatar').attributes('alt')).toBe(`${UsersMock.user.name}'s avatar`);
+ expect(reviewer.find('.avatar').attributes('src')).toContain('avatar/root.png');
+ expect(reviewer.find('.avatar').attributes('alt')).toBe(`Root's avatar`);
- expect(trimText(reviewer.find('.author').text())).toBe(UsersMock.user.name);
+ expect(trimText(reviewer.find('.author').text())).toBe('Root');
});
});
describe('Two or more reviewers/users', () => {
it('displays two reviewer icons when collapsed', () => {
- const users = UsersMockHelper.createNumberRandomUsers(2);
+ const users = [usersMock(), usersMock(2)];
createWrapper({
...getDefaultProps(),
users,
@@ -70,21 +84,21 @@ describe('Reviewer component', () => {
const first = collapsedChildren.at(0);
- expect(first.find('.avatar').attributes('src')).toBe(users[0].avatar_url);
+ expect(first.find('.avatar').attributes('src')).toBe(users[0].avatarUrl);
expect(first.find('.avatar').attributes('alt')).toBe(`${users[0].name}'s avatar`);
expect(trimText(first.find('.author').text())).toBe(users[0].name);
const second = collapsedChildren.at(1);
- expect(second.find('.avatar').attributes('src')).toBe(users[1].avatar_url);
+ expect(second.find('.avatar').attributes('src')).toBe(users[1].avatarUrl);
expect(second.find('.avatar').attributes('alt')).toBe(`${users[1].name}'s avatar`);
expect(trimText(second.find('.author').text())).toBe(users[1].name);
});
it('displays one reviewer icon and counter when collapsed', () => {
- const users = UsersMockHelper.createNumberRandomUsers(3);
+ const users = [usersMock(), usersMock(2), usersMock(3)];
createWrapper({
...getDefaultProps(),
users,
@@ -96,7 +110,7 @@ describe('Reviewer component', () => {
const first = collapsedChildren.at(0);
- expect(first.find('.avatar').attributes('src')).toBe(users[0].avatar_url);
+ expect(first.find('.avatar').attributes('src')).toBe(users[0].avatarUrl);
expect(first.find('.avatar').attributes('alt')).toBe(`${users[0].name}'s avatar`);
expect(trimText(first.find('.author').text())).toBe(users[0].name);
@@ -107,7 +121,7 @@ describe('Reviewer component', () => {
});
it('Shows two reviewers', () => {
- const users = UsersMockHelper.createNumberRandomUsers(2);
+ const users = [usersMock(), usersMock(2)];
createWrapper({
...getDefaultProps(),
users,
@@ -118,10 +132,10 @@ describe('Reviewer component', () => {
});
it('shows sorted reviewer where "can merge" users are sorted first', () => {
- const users = UsersMockHelper.createNumberRandomUsers(3);
- users[0].can_merge = false;
- users[1].can_merge = false;
- users[2].can_merge = true;
+ const users = [usersMock(), usersMock(2), usersMock(3)];
+ users[0].mergeRequestInteraction.canMerge = false;
+ users[1].mergeRequestInteraction.canMerge = false;
+ users[2].mergeRequestInteraction.canMerge = true;
createWrapper({
...getDefaultProps(),
@@ -129,14 +143,14 @@ describe('Reviewer component', () => {
editable: true,
});
- expect(wrapper.vm.sortedReviewers[0].can_merge).toBe(true);
+ expect(wrapper.vm.sortedReviewers[0].mergeRequestInteraction.canMerge).toBe(true);
});
it('passes the sorted reviewers to the uncollapsed-reviewer-list', () => {
- const users = UsersMockHelper.createNumberRandomUsers(3);
- users[0].can_merge = false;
- users[1].can_merge = false;
- users[2].can_merge = true;
+ const users = [usersMock(), usersMock(2), usersMock(3)];
+ users[0].mergeRequestInteraction.canMerge = false;
+ users[1].mergeRequestInteraction.canMerge = false;
+ users[2].mergeRequestInteraction.canMerge = true;
createWrapper({
...getDefaultProps(),
@@ -149,10 +163,10 @@ describe('Reviewer component', () => {
});
it('passes the sorted reviewers to the collapsed-reviewer-list', () => {
- const users = UsersMockHelper.createNumberRandomUsers(3);
- users[0].can_merge = false;
- users[1].can_merge = false;
- users[2].can_merge = true;
+ const users = [usersMock(), usersMock(2), usersMock(3)];
+ users[0].mergeRequestInteraction.canMerge = false;
+ users[1].mergeRequestInteraction.canMerge = false;
+ users[2].mergeRequestInteraction.canMerge = true;
createWrapper({
...getDefaultProps(),
diff --git a/spec/frontend/sidebar/sidebar_mediator_spec.js b/spec/frontend/sidebar/sidebar_mediator_spec.js
index 82fb10ab1d2..e32694abcce 100644
--- a/spec/frontend/sidebar/sidebar_mediator_spec.js
+++ b/spec/frontend/sidebar/sidebar_mediator_spec.js
@@ -1,12 +1,9 @@
import MockAdapter from 'axios-mock-adapter';
-import createFlash from '~/flash';
import axios from '~/lib/utils/axios_utils';
import * as urlUtility from '~/lib/utils/url_utility';
import SidebarService, { gqClient } from '~/sidebar/services/sidebar_service';
import SidebarMediator from '~/sidebar/sidebar_mediator';
import SidebarStore from '~/sidebar/stores/sidebar_store';
-import toast from '~/vue_shared/plugins/global_toast';
-import { refreshUserMergeRequestCounts } from '~/commons/nav/user_merge_requests';
import Mock from './mock_data';
jest.mock('~/flash');
@@ -122,93 +119,4 @@ describe('Sidebar mediator', () => {
urlSpy.mockRestore();
});
});
-
- describe('toggleAttentionRequested', () => {
- let requestAttentionMock;
- let removeAttentionRequestMock;
-
- beforeEach(() => {
- requestAttentionMock = jest.spyOn(mediator.service, 'requestAttention').mockResolvedValue();
- removeAttentionRequestMock = jest
- .spyOn(mediator.service, 'removeAttentionRequest')
- .mockResolvedValue();
- });
-
- it.each`
- attentionIsCurrentlyRequested | serviceMethod
- ${true} | ${'remove'}
- ${false} | ${'add'}
- `(
- "calls the $serviceMethod service method when the user's attention request is set to $attentionIsCurrentlyRequested",
- async ({ serviceMethod }) => {
- const methods = {
- add: requestAttentionMock,
- remove: removeAttentionRequestMock,
- };
- mediator.store.reviewers = [{ id: 1, attention_requested: false, username: 'root' }];
-
- await mediator.toggleAttentionRequested('reviewer', {
- user: { id: 1, username: 'root' },
- callback: jest.fn(),
- direction: serviceMethod,
- });
-
- expect(methods[serviceMethod]).toHaveBeenCalledWith(1);
- expect(refreshUserMergeRequestCounts).toHaveBeenCalled();
- },
- );
-
- it.each`
- type | method
- ${'reviewer'} | ${'findReviewer'}
- `('finds $type', ({ type, method }) => {
- const methodSpy = jest.spyOn(mediator.store, method);
-
- mediator.toggleAttentionRequested(type, { user: { id: 1 }, callback: jest.fn() });
-
- expect(methodSpy).toHaveBeenCalledWith({ id: 1 });
- });
-
- it.each`
- attentionRequested | toastMessage
- ${true} | ${'Removed attention request from @root'}
- ${false} | ${'Requested attention from @root'}
- `(
- 'it creates toast $toastMessage when attention_requested is $attentionRequested',
- async ({ attentionRequested, toastMessage }) => {
- mediator.store.reviewers = [
- { id: 1, attention_requested: attentionRequested, username: 'root' },
- ];
-
- await mediator.toggleAttentionRequested('reviewer', {
- user: { id: 1, username: 'root' },
- callback: jest.fn(),
- });
-
- expect(toast).toHaveBeenCalledWith(toastMessage);
- },
- );
-
- describe('errors', () => {
- beforeEach(() => {
- jest
- .spyOn(mediator.service, 'removeAttentionRequest')
- .mockRejectedValueOnce(new Error('Something went wrong'));
- });
-
- it('shows an error message', async () => {
- await mediator.toggleAttentionRequested('reviewer', {
- user: { id: 1, username: 'root' },
- callback: jest.fn(),
- direction: 'remove',
- });
-
- expect(createFlash).toHaveBeenCalledWith(
- expect.objectContaining({
- message: 'Updating the attention request for root failed.',
- }),
- );
- });
- });
- });
});
diff --git a/spec/frontend/snippets/components/__snapshots__/snippet_blob_edit_spec.js.snap b/spec/frontend/snippets/components/__snapshots__/snippet_blob_edit_spec.js.snap
index 6fc358a6a15..76e84fa183c 100644
--- a/spec/frontend/snippets/components/__snapshots__/snippet_blob_edit_spec.js.snap
+++ b/spec/frontend/snippets/components/__snapshots__/snippet_blob_edit_spec.js.snap
@@ -16,6 +16,7 @@ exports[`Snippet Blob Edit component with loaded blob matches snapshot 1`] = `
<source-editor-stub
debouncevalue="250"
editoroptions="[object Object]"
+ extensions="[object Object]"
fileglobalid="blob_local_7"
filename="foo/bar/test.md"
value="Lorem ipsum dolar sit amet,
diff --git a/spec/frontend/surveys/merge_request_performance/app_spec.js b/spec/frontend/surveys/merge_request_performance/app_spec.js
index 6e8cc660b1d..cd549155914 100644
--- a/spec/frontend/surveys/merge_request_performance/app_spec.js
+++ b/spec/frontend/surveys/merge_request_performance/app_spec.js
@@ -25,6 +25,9 @@ describe('MergeRequestExperienceSurveyApp', () => {
shouldShowCallout,
});
wrapper = shallowMountExtended(MergeRequestExperienceSurveyApp, {
+ propsData: {
+ accountAge: 0,
+ },
stubs: {
UserCalloutDismisser: dismisserComponent,
GlSprintf,
@@ -82,11 +85,17 @@ describe('MergeRequestExperienceSurveyApp', () => {
expect(trackingSpy).toHaveBeenCalledWith(undefined, 'survey:mr_experience', {
value: 5,
label: 'overall',
+ extra: {
+ accountAge: 0,
+ },
});
rate.vm.$emit('rate', 4);
expect(trackingSpy).toHaveBeenCalledWith(undefined, 'survey:mr_experience', {
value: 4,
label: 'performance',
+ extra: {
+ accountAge: 0,
+ },
});
});
diff --git a/spec/frontend/test_setup.js b/spec/frontend/test_setup.js
index b4626625f31..3fb226e5ed3 100644
--- a/spec/frontend/test_setup.js
+++ b/spec/frontend/test_setup.js
@@ -1,8 +1,7 @@
/* Setup for unit test environment */
+// eslint-disable-next-line no-restricted-syntax
+import { setImmediate } from 'timers';
import 'helpers/shared_test_setup';
-import { initializeTestTimeout } from 'helpers/timeout';
-
-initializeTestTimeout(process.env.CI ? 6000 : 500);
afterEach(() =>
// give Promises a bit more time so they fail the right test
diff --git a/spec/frontend/user_popovers_spec.js b/spec/frontend/user_popovers_spec.js
index b171c8fc9ed..0530569c9df 100644
--- a/spec/frontend/user_popovers_spec.js
+++ b/spec/frontend/user_popovers_spec.js
@@ -10,6 +10,8 @@ jest.mock('~/api/user_api', () => ({
}));
describe('User Popovers', () => {
+ let origGon;
+
const fixtureTemplate = 'merge_requests/merge_request_with_mentions.html';
const selector = '.js-user-link[data-user], .js-user-link[data-user-id]';
@@ -39,7 +41,7 @@ describe('User Popovers', () => {
el.dispatchEvent(event);
};
- beforeEach(() => {
+ const setupTestSubject = () => {
loadHTMLFixture(fixtureTemplate);
const usersCacheSpy = () => Promise.resolve(dummyUser);
@@ -56,147 +58,179 @@ describe('User Popovers', () => {
document.body.appendChild(mountingRoot);
popoverInstance.$mount(mountingRoot);
});
+ };
+
+ beforeEach(() => {
+ origGon = window.gon;
+ window.gon = {};
});
afterEach(() => {
- resetHTMLFixture();
+ window.gon = origGon;
});
- describe('shows a placeholder popover on hover', () => {
- let linksWithUsers;
+ describe('when signed out', () => {
beforeEach(() => {
- linksWithUsers = findFixtureLinks();
+ setupTestSubject();
+ });
+
+ it('does not show a placeholder popover on hover', () => {
+ const linksWithUsers = findFixtureLinks();
linksWithUsers.forEach((el) => {
triggerEvent('mouseover', el);
});
+
+ expect(findPopovers().length).toBe(0);
});
+ });
- it('for initial links', () => {
- expect(findPopovers().length).toBe(linksWithUsers.length);
+ describe('when signed in', () => {
+ beforeEach(() => {
+ window.gon.current_user_id = 7;
+
+ setupTestSubject();
});
- it('for elements added after initial load', async () => {
- const addedLinks = [createUserLink(), createUserLink()];
- addedLinks.forEach((link) => {
- document.body.appendChild(link);
- });
+ afterEach(() => {
+ resetHTMLFixture();
+ });
- jest.runOnlyPendingTimers();
+ describe('shows a placeholder popover on hover', () => {
+ let linksWithUsers;
+ beforeEach(() => {
+ linksWithUsers = findFixtureLinks();
+ linksWithUsers.forEach((el) => {
+ triggerEvent('mouseover', el);
+ });
+ });
- addedLinks.forEach((link) => {
- triggerEvent('mouseover', link);
+ it('for initial links', () => {
+ expect(findPopovers().length).toBe(linksWithUsers.length);
});
- expect(findPopovers().length).toBe(linksWithUsers.length + addedLinks.length);
+ it('for elements added after initial load', async () => {
+ const addedLinks = [createUserLink(), createUserLink()];
+ addedLinks.forEach((link) => {
+ document.body.appendChild(link);
+ });
+
+ jest.runOnlyPendingTimers();
+
+ addedLinks.forEach((link) => {
+ triggerEvent('mouseover', link);
+ });
+
+ expect(findPopovers().length).toBe(linksWithUsers.length + addedLinks.length);
+ });
});
- });
- it('does not initialize the popovers for group references', async () => {
- const [groupLink] = Array.from(document.querySelectorAll('.js-user-link[data-group]'));
+ it('does not initialize the popovers for group references', async () => {
+ const [groupLink] = Array.from(document.querySelectorAll('.js-user-link[data-group]'));
- triggerEvent('mouseover', groupLink);
- jest.runOnlyPendingTimers();
+ triggerEvent('mouseover', groupLink);
+ jest.runOnlyPendingTimers();
- expect(findPopovers().length).toBe(0);
- });
+ expect(findPopovers().length).toBe(0);
+ });
- it('does not initialize the popovers for @all references', async () => {
- const [projectLink] = Array.from(document.querySelectorAll('.js-user-link[data-project]'));
+ it('does not initialize the popovers for @all references', async () => {
+ const [projectLink] = Array.from(document.querySelectorAll('.js-user-link[data-project]'));
- triggerEvent('mouseover', projectLink);
- jest.runOnlyPendingTimers();
+ triggerEvent('mouseover', projectLink);
+ jest.runOnlyPendingTimers();
- expect(findPopovers().length).toBe(0);
- });
+ expect(findPopovers().length).toBe(0);
+ });
- it('does not initialize the user popovers twice for the same element', async () => {
- const [firstUserLink] = findFixtureLinks();
- triggerEvent('mouseover', firstUserLink);
- jest.runOnlyPendingTimers();
- triggerEvent('mouseleave', firstUserLink);
- jest.runOnlyPendingTimers();
- triggerEvent('mouseover', firstUserLink);
- jest.runOnlyPendingTimers();
+ it('does not initialize the user popovers twice for the same element', async () => {
+ const [firstUserLink] = findFixtureLinks();
+ triggerEvent('mouseover', firstUserLink);
+ jest.runOnlyPendingTimers();
+ triggerEvent('mouseleave', firstUserLink);
+ jest.runOnlyPendingTimers();
+ triggerEvent('mouseover', firstUserLink);
+ jest.runOnlyPendingTimers();
- expect(findPopovers().length).toBe(1);
- });
+ expect(findPopovers().length).toBe(1);
+ });
- describe('when user link emits mouseenter event with empty user cache', () => {
- let userLink;
+ describe('when user link emits mouseenter event with empty user cache', () => {
+ let userLink;
- beforeEach(() => {
- UsersCache.retrieveById.mockReset();
+ beforeEach(() => {
+ UsersCache.retrieveById.mockReset();
- [userLink] = findFixtureLinks();
+ [userLink] = findFixtureLinks();
- triggerEvent('mouseover', userLink);
- });
+ triggerEvent('mouseover', userLink);
+ });
- it('populates popover with preloaded user data', () => {
- const { name, userId, username } = userLink.dataset;
+ it('populates popover with preloaded user data', () => {
+ const { name, userId, username } = userLink.dataset;
- expect(userLink.user).toEqual(
- expect.objectContaining({
- name,
- userId,
- username,
- }),
- );
+ expect(userLink.user).toEqual(
+ expect.objectContaining({
+ name,
+ userId,
+ username,
+ }),
+ );
+ });
});
- });
- describe('when user link emits mouseenter event', () => {
- let userLink;
+ describe('when user link emits mouseenter event', () => {
+ let userLink;
- beforeEach(() => {
- [userLink] = findFixtureLinks();
+ beforeEach(() => {
+ [userLink] = findFixtureLinks();
- triggerEvent('mouseover', userLink);
- });
+ triggerEvent('mouseover', userLink);
+ });
- it('removes title attribute from user links', () => {
- expect(userLink.getAttribute('title')).toBeFalsy();
- expect(userLink.dataset.originalTitle).toBeFalsy();
- });
+ it('removes title attribute from user links', () => {
+ expect(userLink.getAttribute('title')).toBeFalsy();
+ expect(userLink.dataset.originalTitle).toBeFalsy();
+ });
- it('fetches user info and status from the user cache', () => {
- const { userId } = userLink.dataset;
+ it('fetches user info and status from the user cache', () => {
+ const { userId } = userLink.dataset;
- expect(UsersCache.retrieveById).toHaveBeenCalledWith(userId);
- expect(UsersCache.retrieveStatusById).toHaveBeenCalledWith(userId);
- });
+ expect(UsersCache.retrieveById).toHaveBeenCalledWith(userId);
+ expect(UsersCache.retrieveStatusById).toHaveBeenCalledWith(userId);
+ });
- it('removes aria-describedby attribute from the user link on mouseleave', () => {
- userLink.setAttribute('aria-describedby', 'popover');
- triggerEvent('mouseleave', userLink);
+ it('removes aria-describedby attribute from the user link on mouseleave', () => {
+ userLink.setAttribute('aria-describedby', 'popover');
+ triggerEvent('mouseleave', userLink);
- expect(userLink.getAttribute('aria-describedby')).toBe(null);
- });
+ expect(userLink.getAttribute('aria-describedby')).toBe(null);
+ });
- it('updates toggle follow button and `UsersCache` when toggle follow button is clicked', async () => {
- const [firstPopover] = findPopovers();
- const withinFirstPopover = within(firstPopover);
- const findFollowButton = () => withinFirstPopover.queryByRole('button', { name: 'Follow' });
- const findUnfollowButton = () =>
- withinFirstPopover.queryByRole('button', { name: 'Unfollow' });
+ it('updates toggle follow button and `UsersCache` when toggle follow button is clicked', async () => {
+ const [firstPopover] = findPopovers();
+ const withinFirstPopover = within(firstPopover);
+ const findFollowButton = () => withinFirstPopover.queryByRole('button', { name: 'Follow' });
+ const findUnfollowButton = () =>
+ withinFirstPopover.queryByRole('button', { name: 'Unfollow' });
- jest.runOnlyPendingTimers();
+ jest.runOnlyPendingTimers();
- const { userId } = document.querySelector(selector).dataset;
+ const { userId } = document.querySelector(selector).dataset;
- triggerEvent('click', findFollowButton());
+ triggerEvent('click', findFollowButton());
- await waitForPromises();
+ await waitForPromises();
- expect(findUnfollowButton()).not.toBe(null);
- expect(UsersCache.updateById).toHaveBeenCalledWith(userId, { is_followed: true });
+ expect(findUnfollowButton()).not.toBe(null);
+ expect(UsersCache.updateById).toHaveBeenCalledWith(userId, { is_followed: true });
- triggerEvent('click', findUnfollowButton());
+ triggerEvent('click', findUnfollowButton());
- await waitForPromises();
+ await waitForPromises();
- expect(findFollowButton()).not.toBe(null);
- expect(UsersCache.updateById).toHaveBeenCalledWith(userId, { is_followed: false });
+ expect(findFollowButton()).not.toBe(null);
+ expect(UsersCache.updateById).toHaveBeenCalledWith(userId, { is_followed: false });
+ });
});
});
});
diff --git a/spec/frontend/vue_mr_widget/components/extensions/actions_spec.js b/spec/frontend/vue_merge_request_widget/components/action_buttons.js
index a13db2f4d72..6d714aeaf18 100644
--- a/spec/frontend/vue_mr_widget/components/extensions/actions_spec.js
+++ b/spec/frontend/vue_merge_request_widget/components/action_buttons.js
@@ -1,6 +1,6 @@
import { GlButton, GlDropdownItem } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
-import Actions from '~/vue_merge_request_widget/components/extensions/actions.vue';
+import Actions from '~/vue_merge_request_widget/components/action_buttons.vue';
let wrapper;
diff --git a/spec/frontend/vue_mr_widget/components/added_commit_message_spec.js b/spec/frontend/vue_merge_request_widget/components/added_commit_message_spec.js
index 150680caa7e..cb53dc1fb61 100644
--- a/spec/frontend/vue_mr_widget/components/added_commit_message_spec.js
+++ b/spec/frontend/vue_merge_request_widget/components/added_commit_message_spec.js
@@ -10,11 +10,6 @@ function factory(propsData) {
targetBranch: 'main',
...propsData,
},
- provide: {
- glFeatures: {
- restructuredMrWidget: true.valueOf,
- },
- },
});
}
diff --git a/spec/frontend/vue_mr_widget/components/approvals/approvals_spec.js b/spec/frontend/vue_merge_request_widget/components/approvals/approvals_spec.js
index 05cd1bb5b3d..05cd1bb5b3d 100644
--- a/spec/frontend/vue_mr_widget/components/approvals/approvals_spec.js
+++ b/spec/frontend/vue_merge_request_widget/components/approvals/approvals_spec.js
diff --git a/spec/frontend/vue_mr_widget/components/approvals/approvals_summary_optional_spec.js b/spec/frontend/vue_merge_request_widget/components/approvals/approvals_summary_optional_spec.js
index 65cafc647e0..65cafc647e0 100644
--- a/spec/frontend/vue_mr_widget/components/approvals/approvals_summary_optional_spec.js
+++ b/spec/frontend/vue_merge_request_widget/components/approvals/approvals_summary_optional_spec.js
diff --git a/spec/frontend/vue_mr_widget/components/approvals/approvals_summary_spec.js b/spec/frontend/vue_merge_request_widget/components/approvals/approvals_summary_spec.js
index c2606346292..c2606346292 100644
--- a/spec/frontend/vue_mr_widget/components/approvals/approvals_summary_spec.js
+++ b/spec/frontend/vue_merge_request_widget/components/approvals/approvals_summary_spec.js
diff --git a/spec/frontend/vue_mr_widget/components/approvals/humanized_text_spec.js b/spec/frontend/vue_merge_request_widget/components/approvals/humanized_text_spec.js
index d6776c00b29..d6776c00b29 100644
--- a/spec/frontend/vue_mr_widget/components/approvals/humanized_text_spec.js
+++ b/spec/frontend/vue_merge_request_widget/components/approvals/humanized_text_spec.js
diff --git a/spec/frontend/vue_mr_widget/components/artifacts_list_app_spec.js b/spec/frontend/vue_merge_request_widget/components/artifacts_list_app_spec.js
index e2386bc7f2b..e2386bc7f2b 100644
--- a/spec/frontend/vue_mr_widget/components/artifacts_list_app_spec.js
+++ b/spec/frontend/vue_merge_request_widget/components/artifacts_list_app_spec.js
diff --git a/spec/frontend/vue_mr_widget/components/artifacts_list_spec.js b/spec/frontend/vue_merge_request_widget/components/artifacts_list_spec.js
index 712abfe228a..712abfe228a 100644
--- a/spec/frontend/vue_mr_widget/components/artifacts_list_spec.js
+++ b/spec/frontend/vue_merge_request_widget/components/artifacts_list_spec.js
diff --git a/spec/frontend/vue_mr_widget/components/extensions/child_content_spec.js b/spec/frontend/vue_merge_request_widget/components/extensions/child_content_spec.js
index 198a4c2823a..198a4c2823a 100644
--- a/spec/frontend/vue_mr_widget/components/extensions/child_content_spec.js
+++ b/spec/frontend/vue_merge_request_widget/components/extensions/child_content_spec.js
diff --git a/spec/frontend/vue_mr_widget/components/extensions/index_spec.js b/spec/frontend/vue_merge_request_widget/components/extensions/index_spec.js
index dc25596655a..dc25596655a 100644
--- a/spec/frontend/vue_mr_widget/components/extensions/index_spec.js
+++ b/spec/frontend/vue_merge_request_widget/components/extensions/index_spec.js
diff --git a/spec/frontend/vue_mr_widget/components/extensions/status_icon_spec.js b/spec/frontend/vue_merge_request_widget/components/extensions/status_icon_spec.js
index f3aa5bb774f..f3aa5bb774f 100644
--- a/spec/frontend/vue_mr_widget/components/extensions/status_icon_spec.js
+++ b/spec/frontend/vue_merge_request_widget/components/extensions/status_icon_spec.js
diff --git a/spec/frontend/vue_mr_widget/components/extensions/utils_spec.js b/spec/frontend/vue_merge_request_widget/components/extensions/utils_spec.js
index 5799799ad5e..5799799ad5e 100644
--- a/spec/frontend/vue_mr_widget/components/extensions/utils_spec.js
+++ b/spec/frontend/vue_merge_request_widget/components/extensions/utils_spec.js
diff --git a/spec/frontend/vue_mr_widget/components/mr_collapsible_extension_spec.js b/spec/frontend/vue_merge_request_widget/components/mr_collapsible_extension_spec.js
index 01fbcb2154f..01fbcb2154f 100644
--- a/spec/frontend/vue_mr_widget/components/mr_collapsible_extension_spec.js
+++ b/spec/frontend/vue_merge_request_widget/components/mr_collapsible_extension_spec.js
diff --git a/spec/frontend/vue_mr_widget/components/mr_widget_alert_message_spec.js b/spec/frontend/vue_merge_request_widget/components/mr_widget_alert_message_spec.js
index 5d923d0383f..5d923d0383f 100644
--- a/spec/frontend/vue_mr_widget/components/mr_widget_alert_message_spec.js
+++ b/spec/frontend/vue_merge_request_widget/components/mr_widget_alert_message_spec.js
diff --git a/spec/frontend/vue_mr_widget/components/mr_widget_author_spec.js b/spec/frontend/vue_merge_request_widget/components/mr_widget_author_spec.js
index 8a42e2e2ce7..8a42e2e2ce7 100644
--- a/spec/frontend/vue_mr_widget/components/mr_widget_author_spec.js
+++ b/spec/frontend/vue_merge_request_widget/components/mr_widget_author_spec.js
diff --git a/spec/frontend/vue_mr_widget/components/mr_widget_author_time_spec.js b/spec/frontend/vue_merge_request_widget/components/mr_widget_author_time_spec.js
index 8fd93809e01..8fd93809e01 100644
--- a/spec/frontend/vue_mr_widget/components/mr_widget_author_time_spec.js
+++ b/spec/frontend/vue_merge_request_widget/components/mr_widget_author_time_spec.js
diff --git a/spec/frontend/vue_mr_widget/components/mr_widget_container_spec.js b/spec/frontend/vue_merge_request_widget/components/mr_widget_container_spec.js
index 4e3e918f7fb..4e3e918f7fb 100644
--- a/spec/frontend/vue_mr_widget/components/mr_widget_container_spec.js
+++ b/spec/frontend/vue_merge_request_widget/components/mr_widget_container_spec.js
diff --git a/spec/frontend/vue_mr_widget/components/mr_widget_expandable_section_spec.js b/spec/frontend/vue_merge_request_widget/components/mr_widget_expandable_section_spec.js
index 631aef412a6..631aef412a6 100644
--- a/spec/frontend/vue_mr_widget/components/mr_widget_expandable_section_spec.js
+++ b/spec/frontend/vue_merge_request_widget/components/mr_widget_expandable_section_spec.js
diff --git a/spec/frontend/vue_mr_widget/components/mr_widget_icon_spec.js b/spec/frontend/vue_merge_request_widget/components/mr_widget_icon_spec.js
index ebd10f31fa7..ebd10f31fa7 100644
--- a/spec/frontend/vue_mr_widget/components/mr_widget_icon_spec.js
+++ b/spec/frontend/vue_merge_request_widget/components/mr_widget_icon_spec.js
diff --git a/spec/frontend/vue_mr_widget/components/mr_widget_memory_usage_spec.js b/spec/frontend/vue_merge_request_widget/components/mr_widget_memory_usage_spec.js
index f0106914674..193a16bae8d 100644
--- a/spec/frontend/vue_mr_widget/components/mr_widget_memory_usage_spec.js
+++ b/spec/frontend/vue_merge_request_widget/components/mr_widget_memory_usage_spec.js
@@ -80,20 +80,20 @@ describe('MemoryUsage', () => {
it('should have default data', () => {
const data = MemoryUsage.data();
- expect(Array.isArray(data.memoryMetrics)).toBeTruthy();
+ expect(Array.isArray(data.memoryMetrics)).toBe(true);
expect(data.memoryMetrics.length).toBe(0);
expect(typeof data.deploymentTime).toBe('number');
expect(data.deploymentTime).toBe(0);
expect(typeof data.hasMetrics).toBe('boolean');
- expect(data.hasMetrics).toBeFalsy();
+ expect(data.hasMetrics).toBe(false);
expect(typeof data.loadFailed).toBe('boolean');
- expect(data.loadFailed).toBeFalsy();
+ expect(data.loadFailed).toBe(false);
expect(typeof data.loadingMetrics).toBe('boolean');
- expect(data.loadingMetrics).toBeTruthy();
+ expect(data.loadingMetrics).toBe(true);
expect(typeof data.backOffRequestCounter).toBe('number');
expect(data.backOffRequestCounter).toBe(0);
@@ -144,7 +144,7 @@ describe('MemoryUsage', () => {
vm.computeGraphData(metrics, deployment_time);
const { hasMetrics, memoryMetrics, deploymentTime, memoryFrom, memoryTo } = vm;
- expect(hasMetrics).toBeTruthy();
+ expect(hasMetrics).toBe(true);
expect(memoryMetrics.length).toBeGreaterThan(0);
expect(deploymentTime).toEqual(deployment_time);
expect(memoryFrom).toEqual('9.13');
@@ -171,7 +171,7 @@ describe('MemoryUsage', () => {
describe('template', () => {
it('should render template elements correctly', () => {
- expect(el.classList.contains('mr-memory-usage')).toBeTruthy();
+ expect(el.classList.contains('mr-memory-usage')).toBe(true);
expect(el.querySelector('.js-usage-info')).toBeDefined();
});
diff --git a/spec/frontend/vue_mr_widget/components/mr_widget_pipeline_container_spec.js b/spec/frontend/vue_merge_request_widget/components/mr_widget_pipeline_container_spec.js
index efe2bf75c3f..efe2bf75c3f 100644
--- a/spec/frontend/vue_mr_widget/components/mr_widget_pipeline_container_spec.js
+++ b/spec/frontend/vue_merge_request_widget/components/mr_widget_pipeline_container_spec.js
diff --git a/spec/frontend/vue_mr_widget/components/mr_widget_pipeline_spec.js b/spec/frontend/vue_merge_request_widget/components/mr_widget_pipeline_spec.js
index 6347e3c3be3..6347e3c3be3 100644
--- a/spec/frontend/vue_mr_widget/components/mr_widget_pipeline_spec.js
+++ b/spec/frontend/vue_merge_request_widget/components/mr_widget_pipeline_spec.js
diff --git a/spec/frontend/vue_mr_widget/components/mr_widget_rebase_spec.js b/spec/frontend/vue_merge_request_widget/components/mr_widget_rebase_spec.js
index 6db82cedd80..534c0baf35d 100644
--- a/spec/frontend/vue_mr_widget/components/mr_widget_rebase_spec.js
+++ b/spec/frontend/vue_merge_request_widget/components/mr_widget_rebase_spec.js
@@ -1,4 +1,4 @@
-import { shallowMount } from '@vue/test-utils';
+import { mount } from '@vue/test-utils';
import { nextTick } from 'vue';
import WidgetRebase from '~/vue_merge_request_widget/components/states/mr_widget_rebase.vue';
import eventHub from '~/vue_merge_request_widget/event_hub';
@@ -8,8 +8,8 @@ jest.mock('~/vue_shared/plugins/global_toast');
let wrapper;
-function createWrapper(propsData, mergeRequestWidgetGraphql, rebaseWithoutCiUi) {
- wrapper = shallowMount(WidgetRebase, {
+function createWrapper(propsData, mergeRequestWidgetGraphql) {
+ wrapper = mount(WidgetRebase, {
propsData,
data() {
return {
@@ -22,7 +22,7 @@ function createWrapper(propsData, mergeRequestWidgetGraphql, rebaseWithoutCiUi)
},
};
},
- provide: { glFeatures: { mergeRequestWidgetGraphql, rebaseWithoutCiUi } },
+ provide: { glFeatures: { mergeRequestWidgetGraphql } },
mocks: {
$apollo: {
queries: {
@@ -110,7 +110,7 @@ describe('Merge request widget rebase component', () => {
expect(findRebaseMessageText()).toContain('Something went wrong!');
});
- describe('Rebase buttons with flag rebaseWithoutCiUi', () => {
+ describe('Rebase buttons', () => {
beforeEach(() => {
createWrapper(
{
@@ -124,7 +124,6 @@ describe('Merge request widget rebase component', () => {
},
},
mergeRequestWidgetGraphql,
- { rebaseWithoutCiUi: true },
);
});
@@ -149,35 +148,6 @@ describe('Merge request widget rebase component', () => {
expect(rebaseMock).toHaveBeenCalledWith({ skipCi: true });
});
});
-
- describe('Rebase button with rebaseWithoutCiUI flag disabled', () => {
- beforeEach(() => {
- createWrapper(
- {
- mr: {
- rebaseInProgress: false,
- canPushToSourceBranch: true,
- },
- service: {
- rebase: rebaseMock,
- poll: pollMock,
- },
- },
- mergeRequestWidgetGraphql,
- );
- });
-
- it('standard rebase button is rendered', () => {
- expect(findStandardRebaseButton().exists()).toBe(true);
- expect(findRebaseWithoutCiButton().exists()).toBe(false);
- });
-
- it('calls rebase method with skip_ci false', () => {
- findStandardRebaseButton().vm.$emit('click');
-
- expect(rebaseMock).toHaveBeenCalledWith({ skipCi: false });
- });
- });
});
describe('without permissions', () => {
@@ -216,24 +186,7 @@ describe('Merge request widget rebase component', () => {
});
});
- it('does not render the "Rebase without pipeline" button with rebaseWithoutCiUI flag enabled', () => {
- createWrapper(
- {
- mr: {
- rebaseInProgress: false,
- canPushToSourceBranch: false,
- targetBranch: exampleTargetBranch,
- },
- service: {},
- },
- mergeRequestWidgetGraphql,
- { rebaseWithoutCiUi: true },
- );
-
- expect(findRebaseWithoutCiButton().exists()).toBe(false);
- });
-
- it('does not render the standard rebase button with rebaseWithoutCiUI flag disabled', () => {
+ it('does render the "Rebase without pipeline" button', () => {
createWrapper(
{
mr: {
@@ -246,7 +199,7 @@ describe('Merge request widget rebase component', () => {
mergeRequestWidgetGraphql,
);
- expect(findStandardRebaseButton().exists()).toBe(false);
+ expect(findRebaseWithoutCiButton().exists()).toBe(true);
});
});
diff --git a/spec/frontend/vue_mr_widget/components/mr_widget_related_links_spec.js b/spec/frontend/vue_merge_request_widget/components/mr_widget_related_links_spec.js
index 15522f7ac1d..15522f7ac1d 100644
--- a/spec/frontend/vue_mr_widget/components/mr_widget_related_links_spec.js
+++ b/spec/frontend/vue_merge_request_widget/components/mr_widget_related_links_spec.js
diff --git a/spec/frontend/vue_mr_widget/components/mr_widget_status_icon_spec.js b/spec/frontend/vue_merge_request_widget/components/mr_widget_status_icon_spec.js
index c25e10c5249..11373be578a 100644
--- a/spec/frontend/vue_mr_widget/components/mr_widget_status_icon_spec.js
+++ b/spec/frontend/vue_merge_request_widget/components/mr_widget_status_icon_spec.js
@@ -6,7 +6,6 @@ describe('MR widget status icon component', () => {
let wrapper;
const findLoadingIcon = () => wrapper.find(GlLoadingIcon);
- const findDisabledMergeButton = () => wrapper.find('[data-testid="disabled-merge-button"]');
const createWrapper = (props, mountFn = shallowMount) => {
wrapper = mountFn(mrStatusIcon, {
@@ -41,20 +40,4 @@ describe('MR widget status icon component', () => {
expect(wrapper.find('[data-testid="status_failed-icon"]').exists()).toBe(true);
});
});
-
- describe('with disabled button', () => {
- it('renders a disabled button', () => {
- createWrapper({ status: 'failed', showDisabledButton: true });
-
- expect(findDisabledMergeButton().exists()).toBe(true);
- });
- });
-
- describe('without disabled button', () => {
- it('does not render a disabled button', () => {
- createWrapper({ status: 'failed' });
-
- expect(findDisabledMergeButton().exists()).toBe(false);
- });
- });
});
diff --git a/spec/frontend/vue_mr_widget/components/mr_widget_suggest_pipeline_spec.js b/spec/frontend/vue_merge_request_widget/components/mr_widget_suggest_pipeline_spec.js
index 352bc1a08ea..352bc1a08ea 100644
--- a/spec/frontend/vue_mr_widget/components/mr_widget_suggest_pipeline_spec.js
+++ b/spec/frontend/vue_merge_request_widget/components/mr_widget_suggest_pipeline_spec.js
diff --git a/spec/frontend/vue_mr_widget/components/pipeline_tour_mock_data.js b/spec/frontend/vue_merge_request_widget/components/pipeline_tour_mock_data.js
index eef087d62b8..eef087d62b8 100644
--- a/spec/frontend/vue_mr_widget/components/pipeline_tour_mock_data.js
+++ b/spec/frontend/vue_merge_request_widget/components/pipeline_tour_mock_data.js
diff --git a/spec/frontend/vue_mr_widget/components/review_app_link_spec.js b/spec/frontend/vue_merge_request_widget/components/review_app_link_spec.js
index e393b56034d..e393b56034d 100644
--- a/spec/frontend/vue_mr_widget/components/review_app_link_spec.js
+++ b/spec/frontend/vue_merge_request_widget/components/review_app_link_spec.js
diff --git a/spec/frontend/vue_merge_request_widget/components/states/__snapshots__/mr_widget_auto_merge_enabled_spec.js.snap b/spec/frontend/vue_merge_request_widget/components/states/__snapshots__/mr_widget_auto_merge_enabled_spec.js.snap
new file mode 100644
index 00000000000..de25e2a0450
--- /dev/null
+++ b/spec/frontend/vue_merge_request_widget/components/states/__snapshots__/mr_widget_auto_merge_enabled_spec.js.snap
@@ -0,0 +1,241 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`MRWidgetAutoMergeEnabled when graphql is disabled template should have correct elements 1`] = `
+<div
+ class="mr-widget-body media"
+>
+ <svg
+ aria-hidden="true"
+ class="gl-text-blue-500 gl-mr-3 gl-mt-1 gl-icon s24"
+ data-testid="status_scheduled-icon"
+ role="img"
+ >
+ <use
+ href="#status_scheduled"
+ />
+ </svg>
+
+ <div
+ class="media-body gl-display-flex"
+ >
+
+ <h4
+ class="gl-mr-3"
+ data-testid="statusText"
+ >
+ Set by
+ <a
+ class="author-link inline"
+ >
+ <img
+ class="avatar avatar-inline s16"
+ src="no_avatar.png"
+ />
+
+ <span
+ class="author"
+ >
+
+ </span>
+ </a>
+ to be merged automatically when the pipeline succeeds
+ </h4>
+
+ <div
+ class="gl-display-flex gl-md-display-block gl-font-size-0 gl-ml-auto gl-mt-1"
+ >
+ <div>
+ <div
+ class="dropdown b-dropdown gl-new-dropdown gl-display-block gl-md-display-none! btn-group"
+ lazy=""
+ no-caret=""
+ >
+ <!---->
+ <button
+ aria-expanded="false"
+ aria-haspopup="true"
+ class="btn dropdown-toggle btn-default btn-sm gl-p-2! gl-button gl-dropdown-toggle btn-default-tertiary dropdown-icon-only dropdown-toggle-no-caret"
+ type="button"
+ >
+ <!---->
+
+ <svg
+ aria-hidden="true"
+ class="dropdown-icon gl-icon s16"
+ data-testid="ellipsis_v-icon"
+ role="img"
+ >
+ <use
+ href="#ellipsis_v"
+ />
+ </svg>
+
+ <span
+ class="gl-new-dropdown-button-text gl-sr-only"
+ >
+
+ </span>
+
+ <svg
+ aria-hidden="true"
+ class="gl-button-icon dropdown-chevron gl-icon s16"
+ data-testid="chevron-down-icon"
+ role="img"
+ >
+ <use
+ href="#chevron-down"
+ />
+ </svg>
+ </button>
+ <ul
+ class="dropdown-menu dropdown-menu-right"
+ role="menu"
+ tabindex="-1"
+ >
+ <!---->
+ </ul>
+ </div>
+
+ <button
+ class="btn gl-display-none gl-md-display-block gl-float-left btn-confirm btn-sm gl-button btn-confirm-tertiary js-cancel-auto-merge"
+ data-qa-selector="cancel_auto_merge_button"
+ data-testid="cancelAutomaticMergeButton"
+ type="button"
+ >
+ <!---->
+
+ <!---->
+
+ <span
+ class="gl-button-text"
+ >
+
+ Cancel auto-merge
+
+ </span>
+ </button>
+ </div>
+ </div>
+ </div>
+</div>
+`;
+
+exports[`MRWidgetAutoMergeEnabled when graphql is enabled template should have correct elements 1`] = `
+<div
+ class="mr-widget-body media"
+>
+ <svg
+ aria-hidden="true"
+ class="gl-text-blue-500 gl-mr-3 gl-mt-1 gl-icon s24"
+ data-testid="status_scheduled-icon"
+ role="img"
+ >
+ <use
+ href="#status_scheduled"
+ />
+ </svg>
+
+ <div
+ class="media-body gl-display-flex"
+ >
+
+ <h4
+ class="gl-mr-3"
+ data-testid="statusText"
+ >
+ Set by
+ <a
+ class="author-link inline"
+ >
+ <img
+ class="avatar avatar-inline s16"
+ src="no_avatar.png"
+ />
+
+ <span
+ class="author"
+ >
+
+ </span>
+ </a>
+ to be merged automatically when the pipeline succeeds
+ </h4>
+
+ <div
+ class="gl-display-flex gl-md-display-block gl-font-size-0 gl-ml-auto gl-mt-1"
+ >
+ <div>
+ <div
+ class="dropdown b-dropdown gl-new-dropdown gl-display-block gl-md-display-none! btn-group"
+ lazy=""
+ no-caret=""
+ >
+ <!---->
+ <button
+ aria-expanded="false"
+ aria-haspopup="true"
+ class="btn dropdown-toggle btn-default btn-sm gl-p-2! gl-button gl-dropdown-toggle btn-default-tertiary dropdown-icon-only dropdown-toggle-no-caret"
+ type="button"
+ >
+ <!---->
+
+ <svg
+ aria-hidden="true"
+ class="dropdown-icon gl-icon s16"
+ data-testid="ellipsis_v-icon"
+ role="img"
+ >
+ <use
+ href="#ellipsis_v"
+ />
+ </svg>
+
+ <span
+ class="gl-new-dropdown-button-text gl-sr-only"
+ >
+
+ </span>
+
+ <svg
+ aria-hidden="true"
+ class="gl-button-icon dropdown-chevron gl-icon s16"
+ data-testid="chevron-down-icon"
+ role="img"
+ >
+ <use
+ href="#chevron-down"
+ />
+ </svg>
+ </button>
+ <ul
+ class="dropdown-menu dropdown-menu-right"
+ role="menu"
+ tabindex="-1"
+ >
+ <!---->
+ </ul>
+ </div>
+
+ <button
+ class="btn gl-display-none gl-md-display-block gl-float-left btn-confirm btn-sm gl-button btn-confirm-tertiary js-cancel-auto-merge"
+ data-qa-selector="cancel_auto_merge_button"
+ data-testid="cancelAutomaticMergeButton"
+ type="button"
+ >
+ <!---->
+
+ <!---->
+
+ <span
+ class="gl-button-text"
+ >
+
+ Cancel auto-merge
+
+ </span>
+ </button>
+ </div>
+ </div>
+ </div>
+</div>
+`;
diff --git a/spec/frontend/vue_mr_widget/components/states/__snapshots__/mr_widget_pipeline_failed_spec.js.snap b/spec/frontend/vue_merge_request_widget/components/states/__snapshots__/mr_widget_pipeline_failed_spec.js.snap
index 98297630792..7e741bf4660 100644
--- a/spec/frontend/vue_mr_widget/components/states/__snapshots__/mr_widget_pipeline_failed_spec.js.snap
+++ b/spec/frontend/vue_merge_request_widget/components/states/__snapshots__/mr_widget_pipeline_failed_spec.js.snap
@@ -5,7 +5,7 @@ exports[`PipelineFailed should render error message with a disabled merge button
class="mr-widget-body media"
>
<status-icon-stub
- showdisabledbutton="true"
+ show-disabled-button="true"
status="warning"
/>
@@ -13,7 +13,7 @@ exports[`PipelineFailed should render error message with a disabled merge button
class="media-body space-children"
>
<span
- class="bold"
+ class="gl-ml-0! gl-text-body! bold"
>
<gl-sprintf-stub
message="Merge blocked: pipeline must succeed. Push a commit that fixes the failure, or %{linkStart}learn about other solutions.%{linkEnd}"
diff --git a/spec/frontend/vue_mr_widget/components/states/__snapshots__/new_ready_to_merge_spec.js.snap b/spec/frontend/vue_merge_request_widget/components/states/__snapshots__/new_ready_to_merge_spec.js.snap
index f9936f22ea3..f9936f22ea3 100644
--- a/spec/frontend/vue_mr_widget/components/states/__snapshots__/new_ready_to_merge_spec.js.snap
+++ b/spec/frontend/vue_merge_request_widget/components/states/__snapshots__/new_ready_to_merge_spec.js.snap
diff --git a/spec/frontend/vue_mr_widget/components/states/commit_edit_spec.js b/spec/frontend/vue_merge_request_widget/components/states/commit_edit_spec.js
index c0add94e6ed..c0add94e6ed 100644
--- a/spec/frontend/vue_mr_widget/components/states/commit_edit_spec.js
+++ b/spec/frontend/vue_merge_request_widget/components/states/commit_edit_spec.js
diff --git a/spec/frontend/vue_mr_widget/components/states/merge_checks_failed_spec.js b/spec/frontend/vue_merge_request_widget/components/states/merge_checks_failed_spec.js
index 1900b53ac11..1900b53ac11 100644
--- a/spec/frontend/vue_mr_widget/components/states/merge_checks_failed_spec.js
+++ b/spec/frontend/vue_merge_request_widget/components/states/merge_checks_failed_spec.js
diff --git a/spec/frontend/vue_mr_widget/components/states/merge_failed_pipeline_confirmation_dialog_spec.js b/spec/frontend/vue_merge_request_widget/components/states/merge_failed_pipeline_confirmation_dialog_spec.js
index 0e1c38437f0..c9aca01083d 100644
--- a/spec/frontend/vue_mr_widget/components/states/merge_failed_pipeline_confirmation_dialog_spec.js
+++ b/spec/frontend/vue_merge_request_widget/components/states/merge_failed_pipeline_confirmation_dialog_spec.js
@@ -50,7 +50,7 @@ describe('MergeFailedPipelineConfirmationDialog', () => {
it('should emit the mergeWithFailedPipeline event', () => {
findMergeBtn().vm.$emit('click');
- expect(wrapper.emitted('mergeWithFailedPipeline')).toBeTruthy();
+ expect(wrapper.emitted('mergeWithFailedPipeline')).toHaveLength(1);
});
it('when the cancel button is clicked should emit cancel and call hide', () => {
@@ -58,14 +58,14 @@ describe('MergeFailedPipelineConfirmationDialog', () => {
findCancelBtn().vm.$emit('click');
- expect(wrapper.emitted('cancel')).toBeTruthy();
+ expect(wrapper.emitted('cancel')).toHaveLength(1);
expect(findModal().vm.hide).toHaveBeenCalled();
});
it('should emit cancel when the hide event is emitted', () => {
findModal().vm.$emit('hide');
- expect(wrapper.emitted('cancel')).toBeTruthy();
+ expect(wrapper.emitted('cancel')).toHaveLength(1);
});
it('when modal is shown it will focus the cancel button', () => {
diff --git a/spec/frontend/vue_mr_widget/components/states/mr_widget_archived_spec.js b/spec/frontend/vue_merge_request_widget/components/states/mr_widget_archived_spec.js
index f3061d792d0..9332b7e334a 100644
--- a/spec/frontend/vue_mr_widget/components/states/mr_widget_archived_spec.js
+++ b/spec/frontend/vue_merge_request_widget/components/states/mr_widget_archived_spec.js
@@ -18,11 +18,6 @@ describe('MRWidgetArchived', () => {
expect(vm.$el.querySelector('.ci-status-icon')).not.toBeNull();
});
- it('renders a disabled button', () => {
- expect(vm.$el.querySelector('button').getAttribute('disabled')).toEqual('disabled');
- expect(vm.$el.querySelector('button').textContent.trim()).toEqual('Merge');
- });
-
it('renders information', () => {
expect(vm.$el.querySelector('.bold').textContent.trim()).toEqual(
'Merge unavailable: merge requests are read-only on archived projects.',
diff --git a/spec/frontend/vue_mr_widget/components/states/mr_widget_auto_merge_enabled_spec.js b/spec/frontend/vue_merge_request_widget/components/states/mr_widget_auto_merge_enabled_spec.js
index 7387ed2d5e9..28182793683 100644
--- a/spec/frontend/vue_mr_widget/components/states/mr_widget_auto_merge_enabled_spec.js
+++ b/spec/frontend/vue_merge_request_widget/components/states/mr_widget_auto_merge_enabled_spec.js
@@ -1,4 +1,4 @@
-import { shallowMount } from '@vue/test-utils';
+import { mount } from '@vue/test-utils';
import { nextTick } from 'vue';
import { trimText } from 'helpers/text_helper';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
@@ -37,7 +37,7 @@ function factory(propsData, stateOverride = {}) {
}
wrapper = extendedWrapper(
- shallowMount(autoMergeEnabledComponent, {
+ mount(autoMergeEnabledComponent, {
propsData: {
mr: propsData,
service: new MRWidgetService({}),
@@ -73,7 +73,7 @@ const defaultMrProps = () => ({
autoMergeStrategy: MWPS_MERGE_STRATEGY,
});
-const getStatusText = () => wrapper.findByTestId('statusText').attributes('message');
+const getStatusText = () => wrapper.findByTestId('statusText').text();
describe('MRWidgetAutoMergeEnabled', () => {
let oldWindowGl;
@@ -102,74 +102,6 @@ describe('MRWidgetAutoMergeEnabled', () => {
});
describe('computed', () => {
- describe('canRemoveSourceBranch', () => {
- it('should return true when user is able to remove source branch', () => {
- factory({
- ...defaultMrProps(),
- });
-
- expect(wrapper.findByTestId('removeSourceBranchButton').exists()).toBe(true);
- });
-
- it.each`
- mergeUserId | currentUserId
- ${2} | ${1}
- ${1} | ${2}
- `(
- 'should return false when user id is not the same with who set the MWPS',
- ({ mergeUserId, currentUserId }) => {
- factory({
- ...defaultMrProps(),
- mergeUserId,
- currentUserId,
- });
-
- expect(wrapper.findByTestId('removeSourceBranchButton').exists()).toBe(false);
- },
- );
-
- it('should not find "Delete" button when shouldRemoveSourceBranch set to true', () => {
- factory({
- ...defaultMrProps(),
- shouldRemoveSourceBranch: true,
- });
-
- expect(wrapper.findByTestId('removeSourceBranchButton').exists()).toBe(false);
- });
-
- it('should find "Delete" button when shouldRemoveSourceBranch overrides state.forceRemoveSourceBranch', () => {
- factory(
- {
- ...defaultMrProps(),
- shouldRemoveSourceBranch: false,
- },
- {
- forceRemoveSourceBranch: true,
- },
- );
-
- expect(wrapper.findByTestId('removeSourceBranchButton').exists()).toBe(true);
- });
-
- it('should find "Delete" button when shouldRemoveSourceBranch set to false', () => {
- factory({
- ...defaultMrProps(),
- shouldRemoveSourceBranch: false,
- });
-
- expect(wrapper.findByTestId('removeSourceBranchButton').exists()).toBe(true);
- });
-
- it('should return false if user is not able to remove the source branch', () => {
- factory({
- ...defaultMrProps(),
- canRemoveSourceBranch: false,
- });
-
- expect(wrapper.findByTestId('removeSourceBranchButton').exists()).toBe(false);
- });
- });
-
describe('cancelButtonText', () => {
it('should return "Cancel" if MWPS is selected', () => {
factory({
@@ -205,7 +137,7 @@ describe('MRWidgetAutoMergeEnabled', () => {
await waitForPromises();
- expect(wrapper.vm.isCancellingAutoMerge).toBeTruthy();
+ expect(wrapper.vm.isCancellingAutoMerge).toBe(true);
if (mergeRequestWidgetGraphql) {
expect(eventHub.$emit).toHaveBeenCalledWith('MRWidgetUpdateRequested');
} else {
@@ -265,50 +197,14 @@ describe('MRWidgetAutoMergeEnabled', () => {
expect(wrapper.find('.js-cancel-auto-merge').props('loading')).toBe(true);
});
- it('should show source branch will be deleted text when it source branch set to remove', () => {
- factory({
- ...defaultMrProps(),
- shouldRemoveSourceBranch: true,
- });
-
- const normalizedText = wrapper.text().replace(/\s+/g, ' ');
-
- expect(normalizedText).toContain('Deletes the source branch');
- expect(normalizedText).not.toContain('Does not delete the source branch');
- });
-
- it('should not show delete source branch button when user not able to delete source branch', () => {
- factory({
- ...defaultMrProps(),
- currentUserId: 4,
- });
-
- expect(wrapper.find('.js-remove-source-branch').exists()).toBe(false);
- });
-
- it('should disable delete source branch button when the action is in progress', async () => {
- factory({
- ...defaultMrProps(),
- });
- // setData usage is discouraged. See https://gitlab.com/groups/gitlab-org/-/epics/7330 for details
- // eslint-disable-next-line no-restricted-syntax
- wrapper.setData({
- isRemovingSourceBranch: true,
- });
-
- await nextTick();
-
- expect(wrapper.find('.js-remove-source-branch').props('loading')).toBe(true);
- });
-
it('should render the status text as "...to merged automatically" if MWPS is selected', () => {
factory({
...defaultMrProps(),
autoMergeStrategy: MWPS_MERGE_STRATEGY,
});
- expect(getStatusText()).toBe(
- 'Set by %{merge_author} to be merged automatically when the pipeline succeeds',
+ expect(getStatusText()).toContain(
+ 'to be merged automatically when the pipeline succeeds',
);
});
diff --git a/spec/frontend/vue_mr_widget/components/states/mr_widget_auto_merge_failed_spec.js b/spec/frontend/vue_merge_request_widget/components/states/mr_widget_auto_merge_failed_spec.js
index 24198096564..9320e733636 100644
--- a/spec/frontend/vue_mr_widget/components/states/mr_widget_auto_merge_failed_spec.js
+++ b/spec/frontend/vue_merge_request_widget/components/states/mr_widget_auto_merge_failed_spec.js
@@ -1,5 +1,5 @@
import { GlLoadingIcon, GlButton } from '@gitlab/ui';
-import { shallowMount } from '@vue/test-utils';
+import { mount } from '@vue/test-utils';
import { nextTick } from 'vue';
import AutoMergeFailedComponent from '~/vue_merge_request_widget/components/states/mr_widget_auto_merge_failed.vue';
import eventHub from '~/vue_merge_request_widget/event_hub';
@@ -10,7 +10,7 @@ describe('MRWidgetAutoMergeFailed', () => {
const findButton = () => wrapper.find(GlButton);
const createComponent = (props = {}, mergeRequestWidgetGraphql = false) => {
- wrapper = shallowMount(AutoMergeFailedComponent, {
+ wrapper = mount(AutoMergeFailedComponent, {
propsData: { ...props },
data() {
if (mergeRequestWidgetGraphql) {
@@ -60,7 +60,7 @@ describe('MRWidgetAutoMergeFailed', () => {
await nextTick();
- expect(findButton().attributes('disabled')).toBe('true');
+ expect(findButton().attributes('disabled')).toBe('disabled');
expect(wrapper.find(GlLoadingIcon).exists()).toBe(true);
});
});
diff --git a/spec/frontend/vue_mr_widget/components/states/mr_widget_checking_spec.js b/spec/frontend/vue_merge_request_widget/components/states/mr_widget_checking_spec.js
index afe6bd0e767..02de426204b 100644
--- a/spec/frontend/vue_mr_widget/components/states/mr_widget_checking_spec.js
+++ b/spec/frontend/vue_merge_request_widget/components/states/mr_widget_checking_spec.js
@@ -15,10 +15,6 @@ describe('MRWidgetChecking', () => {
vm.$destroy();
});
- it('renders disabled button', () => {
- expect(vm.$el.querySelector('button').getAttribute('disabled')).toEqual('disabled');
- });
-
it('renders loading icon', () => {
expect(vm.$el.querySelector('.mr-widget-icon span').classList).toContain('gl-spinner');
});
diff --git a/spec/frontend/vue_mr_widget/components/states/mr_widget_closed_spec.js b/spec/frontend/vue_merge_request_widget/components/states/mr_widget_closed_spec.js
index 6ae218ce6f8..f7d046eb8f9 100644
--- a/spec/frontend/vue_mr_widget/components/states/mr_widget_closed_spec.js
+++ b/spec/frontend/vue_merge_request_widget/components/states/mr_widget_closed_spec.js
@@ -36,28 +36,4 @@ describe('MRWidgetClosed', () => {
it('renders warning icon', () => {
expect(vm.$el.querySelector('.js-ci-status-icon-warning')).not.toBeNull();
});
-
- it('renders closed by information with author and time', () => {
- expect(
- vm.$el.querySelector('.js-mr-widget-author').textContent.trim().replace(/\s\s+/g, ' '),
- ).toContain('Closed by Administrator less than a minute ago');
- });
-
- it('links to the user that closed the MR', () => {
- expect(vm.$el.querySelector('.author-link').getAttribute('href')).toEqual(
- 'http://localhost:3000/root',
- );
- });
-
- it('renders information about the changes not being merged', () => {
- expect(
- vm.$el.querySelector('.mr-info-list').textContent.trim().replace(/\s\s+/g, ' '),
- ).toContain('The changes were not merged into so_long_jquery');
- });
-
- it('renders link for target branch', () => {
- expect(vm.$el.querySelector('.label-branch').getAttribute('href')).toEqual(
- '/twitter/flight/commits/so_long_jquery',
- );
- });
});
diff --git a/spec/frontend/vue_mr_widget/components/states/mr_widget_commit_message_dropdown_spec.js b/spec/frontend/vue_merge_request_widget/components/states/mr_widget_commit_message_dropdown_spec.js
index 663fabb761c..663fabb761c 100644
--- a/spec/frontend/vue_mr_widget/components/states/mr_widget_commit_message_dropdown_spec.js
+++ b/spec/frontend/vue_merge_request_widget/components/states/mr_widget_commit_message_dropdown_spec.js
diff --git a/spec/frontend/vue_mr_widget/components/states/mr_widget_commits_header_spec.js b/spec/frontend/vue_merge_request_widget/components/states/mr_widget_commits_header_spec.js
index 2796403b7d0..774e2bafed3 100644
--- a/spec/frontend/vue_mr_widget/components/states/mr_widget_commits_header_spec.js
+++ b/spec/frontend/vue_merge_request_widget/components/states/mr_widget_commits_header_spec.js
@@ -27,7 +27,6 @@ describe('Commits header component', () => {
const findHeaderWrapper = () => wrapper.find('.js-mr-widget-commits-count');
const findCommitToggle = () => wrapper.find('.commit-edit-toggle');
- const findCommitsCountMessage = () => wrapper.find('.commits-count-message');
const findTargetBranchMessage = () => wrapper.find('.label-branch');
const findModifyButton = () => wrapper.find('.modify-message-button');
@@ -40,7 +39,7 @@ describe('Commits header component', () => {
});
it('has commits count message showing 1 commit', () => {
- expect(findCommitsCountMessage().text()).toBe('1 commit');
+ expect(wrapper.text()).toContain('1 commit');
});
it('has button with modify commit message', () => {
@@ -75,7 +74,7 @@ describe('Commits header component', () => {
});
it('has commits count message showing correct amount of commits', () => {
- expect(findCommitsCountMessage().text()).toBe('5 commits');
+ expect(wrapper.text()).toContain('5 commits');
});
it('has button with modify merge commit message', () => {
@@ -89,7 +88,7 @@ describe('Commits header component', () => {
});
it('has commits count message showing one commit when squash is enabled', () => {
- expect(findCommitsCountMessage().text()).toBe('1 commit');
+ expect(wrapper.text()).toContain('1 commit');
});
it('has button with modify commit messages text', () => {
diff --git a/spec/frontend/vue_mr_widget/components/states/mr_widget_conflicts_spec.js b/spec/frontend/vue_merge_request_widget/components/states/mr_widget_conflicts_spec.js
index 7a92484695c..7a9fd5b002d 100644
--- a/spec/frontend/vue_mr_widget/components/states/mr_widget_conflicts_spec.js
+++ b/spec/frontend/vue_merge_request_widget/components/states/mr_widget_conflicts_spec.js
@@ -1,4 +1,4 @@
-import { shallowMount } from '@vue/test-utils';
+import { mount } from '@vue/test-utils';
import { nextTick } from 'vue';
import { TEST_HOST } from 'helpers/test_constants';
import { removeBreakLine } from 'helpers/text_helper';
@@ -23,7 +23,7 @@ describe('MRWidgetConflicts', () => {
async function createComponent(propsData = {}) {
wrapper = extendedWrapper(
- shallowMount(ConflictsComponent, {
+ mount(ConflictsComponent, {
propsData,
provide: {
glFeatures: {
diff --git a/spec/frontend/vue_mr_widget/components/states/mr_widget_failed_to_merge_spec.js b/spec/frontend/vue_merge_request_widget/components/states/mr_widget_failed_to_merge_spec.js
index 6d8e7056366..989aa76f09b 100644
--- a/spec/frontend/vue_mr_widget/components/states/mr_widget_failed_to_merge_spec.js
+++ b/spec/frontend/vue_merge_request_widget/components/states/mr_widget_failed_to_merge_spec.js
@@ -1,6 +1,5 @@
import { shallowMount } from '@vue/test-utils';
import { nextTick } from 'vue';
-import StatusIcon from '~/vue_merge_request_widget/components/mr_widget_status_icon.vue';
import MrWidgetFailedToMerge from '~/vue_merge_request_widget/components/states/mr_widget_failed_to_merge.vue';
import eventHub from '~/vue_merge_request_widget/event_hub';
@@ -54,7 +53,31 @@ describe('MRWidgetFailedToMerge', () => {
await nextTick();
- expect(wrapper.vm.mergeError).toBe('contains line breaks.');
+ expect(wrapper.find('[data-testid="merge-error"]').text()).toBe('contains line breaks.');
+ });
+
+ it('does not append an extra period', async () => {
+ createComponent({ mr: { mergeError: 'contains a period.' } });
+
+ await nextTick();
+
+ expect(wrapper.find('[data-testid="merge-error"]').text()).toBe('contains a period.');
+ });
+
+ it('does not insert an extra space between the final character and the period', async () => {
+ createComponent({ mr: { mergeError: 'contains a <a href="http://example.com">link</a>.' } });
+
+ await nextTick();
+
+ expect(wrapper.find('[data-testid="merge-error"]').text()).toBe('contains a link.');
+ });
+
+ it('removes extra spaces', async () => {
+ createComponent({ mr: { mergeError: 'contains a lot of spaces .' } });
+
+ await nextTick();
+
+ expect(wrapper.find('[data-testid="merge-error"]').text()).toBe('contains a lot of spaces.');
});
});
@@ -116,7 +139,6 @@ describe('MRWidgetFailedToMerge', () => {
it('renders warning icon and disabled merge button', () => {
expect(wrapper.find('.js-ci-status-icon-warning')).not.toBeNull();
- expect(wrapper.find(StatusIcon).props('showDisabledButton')).toBe(true);
});
it('renders given error', () => {
diff --git a/spec/frontend/vue_mr_widget/components/states/mr_widget_merged_spec.js b/spec/frontend/vue_merge_request_widget/components/states/mr_widget_merged_spec.js
index 29ee7e0010f..2606933450e 100644
--- a/spec/frontend/vue_mr_widget/components/states/mr_widget_merged_spec.js
+++ b/spec/frontend/vue_merge_request_widget/components/states/mr_widget_merged_spec.js
@@ -1,5 +1,5 @@
import { getByRole } from '@testing-library/dom';
-import Vue, { nextTick } from 'vue';
+import Vue from 'vue';
import mountComponent from 'helpers/vue_mount_component_helper';
import waitForPromises from 'helpers/wait_for_promises';
import { OPEN_REVERT_MODAL, OPEN_CHERRY_PICK_MODAL } from '~/projects/commit/constants';
@@ -10,14 +10,6 @@ import eventHub from '~/vue_merge_request_widget/event_hub';
describe('MRWidgetMerged', () => {
let vm;
const targetBranch = 'foo';
- const selectors = {
- get copyMergeShaButton() {
- return vm.$el.querySelector('button.js-mr-merged-copy-sha');
- },
- get mergeCommitShaLink() {
- return vm.$el.querySelector('a.js-mr-merged-commit-sha');
- },
- };
beforeEach(() => {
jest.spyOn(document, 'dispatchEvent');
@@ -177,58 +169,11 @@ describe('MRWidgetMerged', () => {
expect(vm.$el.textContent).toContain('Administrator');
});
- it('renders branch information', () => {
- expect(vm.$el.textContent).toContain('The changes were merged into');
- expect(vm.$el.textContent).toContain(targetBranch);
- });
-
- it('renders information about branch being deleted', () => {
- expect(vm.$el.textContent).toContain('The source branch has been deleted');
- });
-
it('shows revert and cherry-pick buttons', () => {
expect(vm.$el.textContent).toContain('Revert');
expect(vm.$el.textContent).toContain('Cherry-pick');
});
- it('shows button to copy commit SHA to clipboard', () => {
- expect(selectors.copyMergeShaButton).not.toBe(null);
- expect(selectors.copyMergeShaButton.dataset.clipboardText).toBe(vm.mr.mergeCommitSha);
- });
-
- it('hides button to copy commit SHA if SHA does not exist', async () => {
- vm.mr.mergeCommitSha = null;
-
- await nextTick();
-
- expect(selectors.copyMergeShaButton).toBe(null);
- expect(vm.$el.querySelector('.mr-info-list').innerText).not.toContain('with');
- });
-
- it('shows merge commit SHA link', () => {
- expect(selectors.mergeCommitShaLink).not.toBe(null);
- expect(selectors.mergeCommitShaLink.text).toContain(vm.mr.shortMergeCommitSha);
- expect(selectors.mergeCommitShaLink.href).toBe(vm.mr.mergeCommitPath);
- });
-
- it('should not show source branch deleted text', async () => {
- vm.mr.sourceBranchRemoved = false;
-
- await nextTick();
-
- expect(vm.$el.innerText).not.toContain('The source branch has been deleted');
- });
-
- it('should show source branch deleting text', async () => {
- vm.mr.isRemovingSourceBranch = true;
- vm.mr.sourceBranchRemoved = false;
-
- await nextTick();
-
- expect(vm.$el.innerText).toContain('The source branch is being deleted');
- expect(vm.$el.innerText).not.toContain('The source branch has been deleted');
- });
-
it('should use mergedEvent mergedAt as tooltip title', () => {
expect(vm.$el.querySelector('time').getAttribute('title')).toBe('Jan 24, 2018 1:02pm UTC');
});
diff --git a/spec/frontend/vue_mr_widget/components/states/mr_widget_merging_spec.js b/spec/frontend/vue_merge_request_widget/components/states/mr_widget_merging_spec.js
index e16c897a49b..49bd3739fdb 100644
--- a/spec/frontend/vue_mr_widget/components/states/mr_widget_merging_spec.js
+++ b/spec/frontend/vue_merge_request_widget/components/states/mr_widget_merging_spec.js
@@ -43,19 +43,6 @@ describe('MRWidgetMerging', () => {
).toContain('Merging!');
});
- it('renders branch information', () => {
- expect(
- wrapper
- .find('.mr-info-list')
- .text()
- .trim()
- .replace(/\s\s+/g, ' ')
- .replace(/[\r\n]+/g, ' '),
- ).toEqual('Merges changes into branch');
-
- expect(wrapper.find('a').attributes('href')).toBe('/branch-path');
- });
-
describe('initiateMergePolling', () => {
it('should call simplePoll', () => {
wrapper.vm.initiateMergePolling();
diff --git a/spec/frontend/vue_mr_widget/components/states/mr_widget_missing_branch_spec.js b/spec/frontend/vue_merge_request_widget/components/states/mr_widget_missing_branch_spec.js
index ddce07954ab..ddce07954ab 100644
--- a/spec/frontend/vue_mr_widget/components/states/mr_widget_missing_branch_spec.js
+++ b/spec/frontend/vue_merge_request_widget/components/states/mr_widget_missing_branch_spec.js
diff --git a/spec/frontend/vue_mr_widget/components/states/mr_widget_not_allowed_spec.js b/spec/frontend/vue_merge_request_widget/components/states/mr_widget_not_allowed_spec.js
index 63e93074857..63e93074857 100644
--- a/spec/frontend/vue_mr_widget/components/states/mr_widget_not_allowed_spec.js
+++ b/spec/frontend/vue_merge_request_widget/components/states/mr_widget_not_allowed_spec.js
diff --git a/spec/frontend/vue_mr_widget/components/states/mr_widget_nothing_to_merge_spec.js b/spec/frontend/vue_merge_request_widget/components/states/mr_widget_nothing_to_merge_spec.js
index c7c0b69425d..6de0c06c33d 100644
--- a/spec/frontend/vue_mr_widget/components/states/mr_widget_nothing_to_merge_spec.js
+++ b/spec/frontend/vue_merge_request_widget/components/states/mr_widget_nothing_to_merge_spec.js
@@ -13,7 +13,7 @@ describe('NothingToMerge', () => {
});
it('should have correct elements', () => {
- expect(vm.$el.classList.contains('mr-widget-body')).toBeTruthy();
+ expect(vm.$el.classList.contains('mr-widget-body')).toBe(true);
expect(vm.$el.querySelector('[data-testid="createFileButton"]').href).toContain(newBlobPath);
expect(vm.$el.innerText).toContain('Use merge requests to propose changes to your project');
});
diff --git a/spec/frontend/vue_mr_widget/components/states/mr_widget_pipeline_blocked_spec.js b/spec/frontend/vue_merge_request_widget/components/states/mr_widget_pipeline_blocked_spec.js
index 9b10b078e89..9b10b078e89 100644
--- a/spec/frontend/vue_mr_widget/components/states/mr_widget_pipeline_blocked_spec.js
+++ b/spec/frontend/vue_merge_request_widget/components/states/mr_widget_pipeline_blocked_spec.js
diff --git a/spec/frontend/vue_mr_widget/components/states/mr_widget_pipeline_failed_spec.js b/spec/frontend/vue_merge_request_widget/components/states/mr_widget_pipeline_failed_spec.js
index 3e0840fef4e..4e44ac539f2 100644
--- a/spec/frontend/vue_mr_widget/components/states/mr_widget_pipeline_failed_spec.js
+++ b/spec/frontend/vue_merge_request_widget/components/states/mr_widget_pipeline_failed_spec.js
@@ -1,5 +1,4 @@
import { shallowMount } from '@vue/test-utils';
-import statusIcon from '~/vue_merge_request_widget/components/mr_widget_status_icon.vue';
import PipelineFailed from '~/vue_merge_request_widget/components/states/pipeline_failed.vue';
describe('PipelineFailed', () => {
@@ -9,8 +8,6 @@ describe('PipelineFailed', () => {
wrapper = shallowMount(PipelineFailed);
};
- const findStatusIcon = () => wrapper.find(statusIcon);
-
beforeEach(() => {
createComponent();
});
@@ -23,8 +20,4 @@ describe('PipelineFailed', () => {
it('should render error message with a disabled merge button', () => {
expect(wrapper.element).toMatchSnapshot();
});
-
- it('merge button should be disabled', () => {
- expect(findStatusIcon().props('showDisabledButton')).toBe(true);
- });
});
diff --git a/spec/frontend/vue_mr_widget/components/states/mr_widget_ready_to_merge_spec.js b/spec/frontend/vue_merge_request_widget/components/states/mr_widget_ready_to_merge_spec.js
index 46d90ddc83c..6e89cd41559 100644
--- a/spec/frontend/vue_mr_widget/components/states/mr_widget_ready_to_merge_spec.js
+++ b/spec/frontend/vue_merge_request_widget/components/states/mr_widget_ready_to_merge_spec.js
@@ -1,5 +1,5 @@
-import { createLocalVue, shallowMount } from '@vue/test-utils';
-import { nextTick } from 'vue';
+import { shallowMount } from '@vue/test-utils';
+import Vue, { nextTick } from 'vue';
import { GlSprintf } from '@gitlab/ui';
import VueApollo from 'vue-apollo';
import produce from 'immer';
@@ -10,7 +10,6 @@ import readyToMergeQuery from 'ee_else_ce/vue_merge_request_widget/queries/state
import simplePoll from '~/lib/utils/simple_poll';
import CommitEdit from '~/vue_merge_request_widget/components/states/commit_edit.vue';
import CommitMessageDropdown from '~/vue_merge_request_widget/components/states/commit_message_dropdown.vue';
-import CommitsHeader from '~/vue_merge_request_widget/components/states/commits_header.vue';
import ReadyToMerge from '~/vue_merge_request_widget/components/states/ready_to_merge.vue';
import SquashBeforeMerge from '~/vue_merge_request_widget/components/states/squash_before_merge.vue';
import MergeFailedPipelineConfirmationDialog from '~/vue_merge_request_widget/components/states/merge_failed_pipeline_confirmation_dialog.vue';
@@ -60,6 +59,7 @@ const createTestMr = (customConfig) => {
transitionStateMachine: (transition) => eventHub.$emit('StateMachineValueChanged', transition),
translateStateToMachine: () => this.transitionStateMachine(),
state: 'open',
+ canMerge: true,
};
Object.assign(mr, customConfig.mr);
@@ -71,8 +71,8 @@ const createTestService = () => ({
merge: jest.fn(),
poll: jest.fn().mockResolvedValue(),
});
-const localVue = createLocalVue();
-localVue.use(VueApollo);
+
+Vue.use(VueApollo);
let wrapper;
let readyToMergeResponseSpy;
@@ -90,10 +90,9 @@ const createReadyToMergeResponse = (customMr) => {
const createComponent = (
customConfig = {},
mergeRequestWidgetGraphql = false,
- restructuredMrWidget = false,
+ restructuredMrWidget = true,
) => {
wrapper = shallowMount(ReadyToMerge, {
- localVue,
propsData: {
mr: createTestMr(customConfig),
service: createTestService(),
@@ -112,7 +111,6 @@ const createComponent = (
};
const findCheckboxElement = () => wrapper.find(SquashBeforeMerge);
-const findCommitsHeaderElement = () => wrapper.find(CommitsHeader);
const findCommitEditElements = () => wrapper.findAll(CommitEdit);
const findCommitDropdownElement = () => wrapper.find(CommitMessageDropdown);
const findFirstCommitEditLabel = () => findCommitEditElements().at(0).props('label');
@@ -371,7 +369,7 @@ describe('ReadyToMerge', () => {
const params = wrapper.vm.service.merge.mock.calls[0][0];
- expect(params.should_remove_source_branch).toBeTruthy();
+ expect(params.should_remove_source_branch).toBe(true);
expect(params.auto_merge_strategy).toBeUndefined();
});
@@ -395,7 +393,7 @@ describe('ReadyToMerge', () => {
const params = wrapper.vm.service.merge.mock.calls[0][0];
- expect(params.should_remove_source_branch).toBeTruthy();
+ expect(params.should_remove_source_branch).toBe(true);
expect(params.auto_merge_strategy).toBeUndefined();
});
@@ -471,8 +469,8 @@ describe('ReadyToMerge', () => {
expect(eventHub.$emit).toHaveBeenCalledWith('SetBranchRemoveFlag', [false]);
- expect(cpc).toBeFalsy();
- expect(spc).toBeTruthy();
+ expect(cpc).toBe(false);
+ expect(spc).toBe(true);
});
it('should continue polling until MR is merged', async () => {
@@ -494,8 +492,8 @@ describe('ReadyToMerge', () => {
await waitForPromises();
- expect(cpc).toBeTruthy();
- expect(spc).toBeFalsy();
+ expect(cpc).toBe(true);
+ expect(spc).toBe(false);
});
});
});
@@ -529,13 +527,13 @@ describe('ReadyToMerge', () => {
mr: { commitsCount: 2, enableSquashBeforeMerge: true },
});
- expect(findCheckboxElement().exists()).toBeTruthy();
+ expect(findCheckboxElement().exists()).toBe(true);
});
it('should not be rendered when squash before merge is disabled', () => {
createComponent({ mr: { commitsCount: 2, enableSquashBeforeMerge: false } });
- expect(findCheckboxElement().exists()).toBeFalsy();
+ expect(findCheckboxElement().exists()).toBe(false);
});
it('should be rendered when there is only 1 commit', () => {
@@ -576,71 +574,9 @@ describe('ReadyToMerge', () => {
});
});
- describe('commits count collapsible header', () => {
- it('should be rendered when fast-forward is disabled', () => {
- createComponent();
-
- expect(findCommitsHeaderElement().exists()).toBeTruthy();
- });
-
- describe('when fast-forward is enabled', () => {
- it('should be rendered if squash and squash before are enabled and there is more than 1 commit', () => {
- createComponent({
- mr: {
- ffOnlyEnabled: true,
- enableSquashBeforeMerge: true,
- squashIsSelected: true,
- commitsCount: 2,
- },
- });
-
- expect(findCommitsHeaderElement().exists()).toBeTruthy();
- });
-
- it('should not be rendered if squash before merge is disabled', () => {
- createComponent({
- mr: {
- ffOnlyEnabled: true,
- enableSquashBeforeMerge: false,
- squash: true,
- commitsCount: 2,
- },
- });
-
- expect(findCommitsHeaderElement().exists()).toBeFalsy();
- });
-
- it('should not be rendered if squash is disabled', () => {
- createComponent({
- mr: {
- ffOnlyEnabled: true,
- squash: false,
- enableSquashBeforeMerge: true,
- commitsCount: 2,
- },
- });
-
- expect(findCommitsHeaderElement().exists()).toBeFalsy();
- });
-
- it('should not be rendered if commits count is 1', () => {
- createComponent({
- mr: {
- ffOnlyEnabled: true,
- squash: true,
- enableSquashBeforeMerge: true,
- commitsCount: 1,
- },
- });
-
- expect(findCommitsHeaderElement().exists()).toBeFalsy();
- });
- });
- });
-
describe('commits edit components', () => {
describe('when fast-forward merge is enabled', () => {
- it('should not be rendered if squash is disabled', () => {
+ it('should not be rendered if squash is disabled', async () => {
createComponent({
mr: {
ffOnlyEnabled: true,
@@ -679,7 +615,7 @@ describe('ReadyToMerge', () => {
expect(findCommitEditElements().length).toBe(0);
});
- it('should have one edit component if squash is enabled and there is more than 1 commit', () => {
+ it('should have one edit component if squash is enabled and there is more than 1 commit', async () => {
createComponent({
mr: {
ffOnlyEnabled: true,
@@ -689,18 +625,14 @@ describe('ReadyToMerge', () => {
},
});
+ await wrapper.find('[data-testid="widget_edit_commit_message"]').vm.$emit('input', true);
+
expect(findCommitEditElements().length).toBe(1);
expect(findFirstCommitEditLabel()).toBe('Squash commit message');
});
});
- it('should have one edit component when squash is disabled', () => {
- createComponent();
-
- expect(findCommitEditElements().length).toBe(1);
- });
-
- it('should have two edit components when squash is enabled and there is more than 1 commit', () => {
+ it('should have two edit components when squash is enabled and there is more than 1 commit', async () => {
createComponent({
mr: {
commitsCount: 2,
@@ -709,6 +641,8 @@ describe('ReadyToMerge', () => {
},
});
+ await wrapper.find('[data-testid="widget_edit_commit_message"]').vm.$emit('input', true);
+
expect(findCommitEditElements().length).toBe(2);
});
@@ -738,11 +672,12 @@ describe('ReadyToMerge', () => {
},
});
await nextTick();
+ await wrapper.find('[data-testid="widget_edit_commit_message"]').vm.$emit('input', true);
expect(findCommitEditElements().length).toBe(2);
});
- it('should have one edit components when squash is enabled and there is 1 commit only', () => {
+ it('should have one edit components when squash is enabled and there is 1 commit only', async () => {
createComponent({
mr: {
commitsCount: 1,
@@ -751,16 +686,12 @@ describe('ReadyToMerge', () => {
},
});
- expect(findCommitEditElements().length).toBe(1);
- });
-
- it('should have correct edit merge commit label', () => {
- createComponent();
+ await wrapper.find('[data-testid="widget_edit_commit_message"]').vm.$emit('input', true);
- expect(findFirstCommitEditLabel()).toBe('Merge commit message');
+ expect(findCommitEditElements().length).toBe(1);
});
- it('should have correct edit squash commit label', () => {
+ it('should have correct edit squash commit label', async () => {
createComponent({
mr: {
commitsCount: 2,
@@ -769,6 +700,8 @@ describe('ReadyToMerge', () => {
},
});
+ await wrapper.find('[data-testid="widget_edit_commit_message"]').vm.$emit('input', true);
+
expect(findFirstCommitEditLabel()).toBe('Squash commit message');
});
});
@@ -777,48 +710,26 @@ describe('ReadyToMerge', () => {
it('should not be rendered if squash is disabled', () => {
createComponent();
- expect(findCommitDropdownElement().exists()).toBeFalsy();
+ expect(findCommitDropdownElement().exists()).toBe(false);
});
- it('should be rendered if squash is enabled and there is more than 1 commit', () => {
+ it('should be rendered if squash is enabled and there is more than 1 commit', async () => {
createComponent({
mr: { enableSquashBeforeMerge: true, squashIsSelected: true, commitsCount: 2 },
});
- expect(findCommitDropdownElement().exists()).toBeTruthy();
+ await wrapper.find('[data-testid="widget_edit_commit_message"]').vm.$emit('input', true);
+
+ expect(findCommitDropdownElement().exists()).toBe(true);
});
});
- it('renders a tip including a link to docs on templates', () => {
+ it('renders a tip including a link to docs on templates', async () => {
createComponent();
- expect(findTipLink().exists()).toBe(true);
- });
- });
-
- describe('Merge request project settings', () => {
- describe('when the merge commit merge method is enabled', () => {
- beforeEach(() => {
- createComponent({
- mr: { ffOnlyEnabled: false },
- });
- });
+ await wrapper.find('[data-testid="widget_edit_commit_message"]').vm.$emit('input', true);
- it('should not show fast forward message', () => {
- expect(wrapper.find('.mr-fast-forward-message').exists()).toBe(false);
- });
- });
-
- describe('when the fast-forward merge method is enabled', () => {
- beforeEach(() => {
- createComponent({
- mr: { ffOnlyEnabled: true },
- });
- });
-
- it('should show fast forward message', () => {
- expect(wrapper.find('.mr-fast-forward-message').exists()).toBe(true);
- });
+ expect(findTipLink().exists()).toBe(true);
});
});
@@ -873,6 +784,7 @@ describe('ReadyToMerge', () => {
createDefaultGqlComponent();
await waitForPromises();
+ await wrapper.find('[data-testid="widget_edit_commit_message"]').vm.$emit('input', true);
expect(finderFn()).toBe(initialValue);
});
@@ -880,6 +792,7 @@ describe('ReadyToMerge', () => {
it('should have updated value after graphql refetch', async () => {
createDefaultGqlComponent();
await waitForPromises();
+ await wrapper.find('[data-testid="widget_edit_commit_message"]').vm.$emit('input', true);
triggerApprovalUpdated();
await waitForPromises();
@@ -890,6 +803,7 @@ describe('ReadyToMerge', () => {
it('should not update if user has touched', async () => {
createDefaultGqlComponent();
await waitForPromises();
+ await wrapper.find('[data-testid="widget_edit_commit_message"]').vm.$emit('input', true);
const input = wrapper.find(inputId);
input.element.value = USER_COMMIT_MESSAGE;
diff --git a/spec/frontend/vue_mr_widget/components/states/mr_widget_sha_mismatch_spec.js b/spec/frontend/vue_merge_request_widget/components/states/mr_widget_sha_mismatch_spec.js
index 2a343997cf5..2a343997cf5 100644
--- a/spec/frontend/vue_mr_widget/components/states/mr_widget_sha_mismatch_spec.js
+++ b/spec/frontend/vue_merge_request_widget/components/states/mr_widget_sha_mismatch_spec.js
diff --git a/spec/frontend/vue_mr_widget/components/states/mr_widget_squash_before_merge_spec.js b/spec/frontend/vue_merge_request_widget/components/states/mr_widget_squash_before_merge_spec.js
index 6ea2e8675d3..6ea2e8675d3 100644
--- a/spec/frontend/vue_mr_widget/components/states/mr_widget_squash_before_merge_spec.js
+++ b/spec/frontend/vue_merge_request_widget/components/states/mr_widget_squash_before_merge_spec.js
diff --git a/spec/frontend/vue_mr_widget/components/states/mr_widget_unresolved_discussions_spec.js b/spec/frontend/vue_merge_request_widget/components/states/mr_widget_unresolved_discussions_spec.js
index e2d79c61b9b..e2d79c61b9b 100644
--- a/spec/frontend/vue_mr_widget/components/states/mr_widget_unresolved_discussions_spec.js
+++ b/spec/frontend/vue_merge_request_widget/components/states/mr_widget_unresolved_discussions_spec.js
diff --git a/spec/frontend/vue_mr_widget/components/states/mr_widget_wip_spec.js b/spec/frontend/vue_merge_request_widget/components/states/mr_widget_wip_spec.js
index 4998147c6b6..af52901f508 100644
--- a/spec/frontend/vue_mr_widget/components/states/mr_widget_wip_spec.js
+++ b/spec/frontend/vue_merge_request_widget/components/states/mr_widget_wip_spec.js
@@ -26,11 +26,11 @@ describe('Wip', () => {
it('should have props', () => {
const { mr, service } = WorkInProgress.props;
- expect(mr.type instanceof Object).toBeTruthy();
- expect(mr.required).toBeTruthy();
+ expect(mr.type instanceof Object).toBe(true);
+ expect(mr.required).toBe(true);
- expect(service.type instanceof Object).toBeTruthy();
- expect(service.required).toBeTruthy();
+ expect(service.type instanceof Object).toBe(true);
+ expect(service.required).toBe(true);
});
});
@@ -64,7 +64,7 @@ describe('Wip', () => {
await waitForPromises();
- expect(vm.isMakingRequest).toBeTruthy();
+ expect(vm.isMakingRequest).toBe(true);
expect(eventHub.$emit).toHaveBeenCalledWith('UpdateWidgetData', mrObj);
expect(toast).toHaveBeenCalledWith('Marked as ready. Merging is now allowed.');
});
@@ -81,12 +81,10 @@ describe('Wip', () => {
});
it('should have correct elements', () => {
- expect(el.classList.contains('mr-widget-body')).toBeTruthy();
+ expect(el.classList.contains('mr-widget-body')).toBe(true);
expect(el.innerText).toContain(
"Merge blocked: merge request must be marked as ready. It's still marked as draft.",
);
- expect(el.querySelector('button').getAttribute('disabled')).toBeTruthy();
- expect(el.querySelector('button').innerText).toContain('Merge');
expect(el.querySelector('.js-remove-draft').innerText.replace(/\s\s+/g, ' ')).toContain(
'Mark as ready',
);
@@ -97,7 +95,7 @@ describe('Wip', () => {
await nextTick();
- expect(el.querySelector('.js-remove-draft')).toEqual(null);
+ expect(el.querySelector('.js-remove-draft')).toBeNull();
});
});
});
diff --git a/spec/frontend/vue_mr_widget/components/states/new_ready_to_merge_spec.js b/spec/frontend/vue_merge_request_widget/components/states/new_ready_to_merge_spec.js
index 5ec9654a4af..5ec9654a4af 100644
--- a/spec/frontend/vue_mr_widget/components/states/new_ready_to_merge_spec.js
+++ b/spec/frontend/vue_merge_request_widget/components/states/new_ready_to_merge_spec.js
diff --git a/spec/frontend/vue_mr_widget/components/terraform/mock_data.js b/spec/frontend/vue_merge_request_widget/components/terraform/mock_data.js
index 8e46af5dfd6..8e46af5dfd6 100644
--- a/spec/frontend/vue_mr_widget/components/terraform/mock_data.js
+++ b/spec/frontend/vue_merge_request_widget/components/terraform/mock_data.js
diff --git a/spec/frontend/vue_mr_widget/components/terraform/mr_widget_terraform_container_spec.js b/spec/frontend/vue_merge_request_widget/components/terraform/mr_widget_terraform_container_spec.js
index 8f20d6a8fc9..8f20d6a8fc9 100644
--- a/spec/frontend/vue_mr_widget/components/terraform/mr_widget_terraform_container_spec.js
+++ b/spec/frontend/vue_merge_request_widget/components/terraform/mr_widget_terraform_container_spec.js
diff --git a/spec/frontend/vue_mr_widget/components/terraform/terraform_plan_spec.js b/spec/frontend/vue_merge_request_widget/components/terraform/terraform_plan_spec.js
index 3c9f6c2e165..3c9f6c2e165 100644
--- a/spec/frontend/vue_mr_widget/components/terraform/terraform_plan_spec.js
+++ b/spec/frontend/vue_merge_request_widget/components/terraform/terraform_plan_spec.js
diff --git a/spec/frontend/vue_merge_request_widget/components/widget/app_spec.js b/spec/frontend/vue_merge_request_widget/components/widget/app_spec.js
new file mode 100644
index 00000000000..6bb718082a4
--- /dev/null
+++ b/spec/frontend/vue_merge_request_widget/components/widget/app_spec.js
@@ -0,0 +1,19 @@
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import App from '~/vue_merge_request_widget/components/widget/app.vue';
+
+describe('MR Widget App', () => {
+ let wrapper;
+
+ const createComponent = () => {
+ wrapper = shallowMountExtended(App, {
+ propsData: {
+ mr: {},
+ },
+ });
+ };
+
+ it('mounts the component', () => {
+ createComponent();
+ expect(wrapper.findByTestId('mr-widget-app').exists()).toBe(true);
+ });
+});
diff --git a/spec/frontend/vue_merge_request_widget/components/widget/widget_spec.js b/spec/frontend/vue_merge_request_widget/components/widget/widget_spec.js
new file mode 100644
index 00000000000..3c08ffdef18
--- /dev/null
+++ b/spec/frontend/vue_merge_request_widget/components/widget/widget_spec.js
@@ -0,0 +1,167 @@
+import { nextTick } from 'vue';
+import * as Sentry from '@sentry/browser';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import waitForPromises from 'helpers/wait_for_promises';
+import StatusIcon from '~/vue_merge_request_widget/components/extensions/status_icon.vue';
+import Widget from '~/vue_merge_request_widget/components/widget/widget.vue';
+
+describe('MR Widget', () => {
+ let wrapper;
+
+ const findStatusIcon = () => wrapper.findComponent(StatusIcon);
+
+ const createComponent = ({ propsData, slots } = {}) => {
+ wrapper = shallowMountExtended(Widget, {
+ propsData: {
+ loadingText: 'Loading widget',
+ widgetName: 'MyWidget',
+ value: {
+ collapsed: null,
+ expanded: null,
+ },
+ ...propsData,
+ },
+ slots,
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('on mount', () => {
+ it('fetches collapsed', async () => {
+ const fetchCollapsedData = jest
+ .fn()
+ .mockReturnValue(Promise.resolve({ headers: {}, status: 200, data: {} }));
+
+ createComponent({ propsData: { fetchCollapsedData } });
+ await waitForPromises();
+ expect(fetchCollapsedData).toHaveBeenCalled();
+ expect(wrapper.vm.error).toBe(null);
+ });
+
+ it('sets the error text when fetch method fails', async () => {
+ const fetchCollapsedData = jest.fn().mockReturnValue(() => Promise.reject());
+ createComponent({ propsData: { fetchCollapsedData } });
+ await waitForPromises();
+ expect(wrapper.vm.error).toBe('Failed to load');
+ });
+
+ it('displays loading icon until request is made and then displays status icon when the request is complete', async () => {
+ const fetchCollapsedData = jest
+ .fn()
+ .mockReturnValue(Promise.resolve({ headers: {}, status: 200, data: {} }));
+
+ createComponent({ propsData: { fetchCollapsedData, statusIconName: 'warning' } });
+
+      // Let the mounted hook run
+ await nextTick();
+
+ expect(findStatusIcon().props('isLoading')).toBe(true);
+
+ // Wait until `fetchCollapsedData` is resolved
+ await waitForPromises();
+
+ expect(findStatusIcon().props('isLoading')).toBe(false);
+ expect(findStatusIcon().props('iconName')).toBe('warning');
+ });
+
+ it('displays the loading text', async () => {
+ const fetchCollapsedData = jest.fn().mockReturnValue(() => Promise.reject());
+ createComponent({ propsData: { fetchCollapsedData, statusIconName: 'warning' } });
+ expect(wrapper.text()).not.toContain('Loading');
+ await nextTick();
+ expect(wrapper.text()).toContain('Loading');
+ });
+ });
+
+ describe('fetch', () => {
+    it('sets the data.collapsed property after a successful call - multiPolling: false', async () => {
+ const mockData = { headers: {}, status: 200, data: { vulnerabilities: [] } };
+ createComponent({ propsData: { fetchCollapsedData: async () => mockData } });
+ await waitForPromises();
+ expect(wrapper.emitted('input')[0][0]).toEqual({ collapsed: mockData.data, expanded: null });
+ });
+
+    it('sets the data.collapsed property after a successful call - multiPolling: true', async () => {
+ const mockData1 = { headers: {}, status: 200, data: { vulnerabilities: [{ vuln: 1 }] } };
+ const mockData2 = { headers: {}, status: 200, data: { vulnerabilities: [{ vuln: 2 }] } };
+
+ createComponent({
+ propsData: {
+ multiPolling: true,
+ fetchCollapsedData: () => [
+ () => Promise.resolve(mockData1),
+ () => Promise.resolve(mockData2),
+ ],
+ },
+ });
+
+ await waitForPromises();
+
+ expect(wrapper.emitted('input')[0][0]).toEqual({
+ collapsed: [mockData1.data, mockData2.data],
+ expanded: null,
+ });
+ });
+
+ it('calls sentry when failed', async () => {
+ const error = new Error('Something went wrong');
+ jest.spyOn(Sentry, 'captureException').mockImplementation();
+ createComponent({
+ propsData: {
+ fetchCollapsedData: async () => Promise.reject(error),
+ },
+ });
+ await waitForPromises();
+ expect(wrapper.emitted('input')).toBeUndefined();
+ expect(Sentry.captureException).toHaveBeenCalledWith(error);
+ });
+ });
+
+ describe('content', () => {
+ it('displays summary property when summary slot is not provided', () => {
+ createComponent({
+ propsData: {
+ summary: 'Hello world',
+ fetchCollapsedData: async () => Promise.resolve(),
+ },
+ });
+
+ expect(wrapper.findByTestId('widget-extension-top-level-summary').text()).toBe('Hello world');
+ });
+
+ it.todo('displays content property when content slot is not provided');
+
+ it('displays the summary slot when provided', () => {
+ createComponent({
+ propsData: {
+ fetchCollapsedData: async () => Promise.resolve(),
+ },
+ slots: {
+ summary: '<b>More complex summary</b>',
+ },
+ });
+
+ expect(wrapper.findByTestId('widget-extension-top-level-summary').text()).toBe(
+ 'More complex summary',
+ );
+ });
+
+ it('displays the content slot when provided', () => {
+ createComponent({
+ propsData: {
+ fetchCollapsedData: async () => Promise.resolve(),
+ },
+ slots: {
+ content: '<b>More complex content</b>',
+ },
+ });
+
+ expect(wrapper.findByTestId('widget-extension-collapsed-section').text()).toBe(
+ 'More complex content',
+ );
+ });
+ });
+});
diff --git a/spec/frontend/vue_mr_widget/deployment/deployment_action_button_spec.js b/spec/frontend/vue_merge_request_widget/deployment/deployment_action_button_spec.js
index 7e7438bcc0f..7e7438bcc0f 100644
--- a/spec/frontend/vue_mr_widget/deployment/deployment_action_button_spec.js
+++ b/spec/frontend/vue_merge_request_widget/deployment/deployment_action_button_spec.js
diff --git a/spec/frontend/vue_mr_widget/deployment/deployment_actions_spec.js b/spec/frontend/vue_merge_request_widget/deployment/deployment_actions_spec.js
index a285d26f404..a285d26f404 100644
--- a/spec/frontend/vue_mr_widget/deployment/deployment_actions_spec.js
+++ b/spec/frontend/vue_merge_request_widget/deployment/deployment_actions_spec.js
diff --git a/spec/frontend/vue_mr_widget/deployment/deployment_list_spec.js b/spec/frontend/vue_merge_request_widget/deployment/deployment_list_spec.js
index 948d7ebab5e..948d7ebab5e 100644
--- a/spec/frontend/vue_mr_widget/deployment/deployment_list_spec.js
+++ b/spec/frontend/vue_merge_request_widget/deployment/deployment_list_spec.js
diff --git a/spec/frontend/vue_mr_widget/deployment/deployment_mock_data.js b/spec/frontend/vue_merge_request_widget/deployment/deployment_mock_data.js
index e98b1160ae4..e98b1160ae4 100644
--- a/spec/frontend/vue_mr_widget/deployment/deployment_mock_data.js
+++ b/spec/frontend/vue_merge_request_widget/deployment/deployment_mock_data.js
diff --git a/spec/frontend/vue_mr_widget/deployment/deployment_spec.js b/spec/frontend/vue_merge_request_widget/deployment/deployment_spec.js
index c27cbd8b781..c27cbd8b781 100644
--- a/spec/frontend/vue_mr_widget/deployment/deployment_spec.js
+++ b/spec/frontend/vue_merge_request_widget/deployment/deployment_spec.js
diff --git a/spec/frontend/vue_mr_widget/deployment/deployment_view_button_spec.js b/spec/frontend/vue_merge_request_widget/deployment/deployment_view_button_spec.js
index eb6e3711e2e..eb6e3711e2e 100644
--- a/spec/frontend/vue_mr_widget/deployment/deployment_view_button_spec.js
+++ b/spec/frontend/vue_merge_request_widget/deployment/deployment_view_button_spec.js
diff --git a/spec/frontend/vue_mr_widget/extensions/test_report/index_spec.js b/spec/frontend/vue_merge_request_widget/extensions/test_report/index_spec.js
index 5c1d3c8e8e8..5c1d3c8e8e8 100644
--- a/spec/frontend/vue_mr_widget/extensions/test_report/index_spec.js
+++ b/spec/frontend/vue_merge_request_widget/extensions/test_report/index_spec.js
diff --git a/spec/frontend/vue_mr_widget/extensions/test_report/utils_spec.js b/spec/frontend/vue_merge_request_widget/extensions/test_report/utils_spec.js
index 69ea70549fe..69ea70549fe 100644
--- a/spec/frontend/vue_mr_widget/extensions/test_report/utils_spec.js
+++ b/spec/frontend/vue_merge_request_widget/extensions/test_report/utils_spec.js
diff --git a/spec/frontend/vue_mr_widget/extentions/accessibility/index_spec.js b/spec/frontend/vue_merge_request_widget/extentions/accessibility/index_spec.js
index a06ad930abe..a06ad930abe 100644
--- a/spec/frontend/vue_mr_widget/extentions/accessibility/index_spec.js
+++ b/spec/frontend/vue_merge_request_widget/extentions/accessibility/index_spec.js
diff --git a/spec/frontend/vue_mr_widget/extentions/accessibility/mock_data.js b/spec/frontend/vue_merge_request_widget/extentions/accessibility/mock_data.js
index 06dc93d101f..06dc93d101f 100644
--- a/spec/frontend/vue_mr_widget/extentions/accessibility/mock_data.js
+++ b/spec/frontend/vue_merge_request_widget/extentions/accessibility/mock_data.js
diff --git a/spec/frontend/vue_mr_widget/extentions/code_quality/index_spec.js b/spec/frontend/vue_merge_request_widget/extentions/code_quality/index_spec.js
index 9a72e4a086b..9a72e4a086b 100644
--- a/spec/frontend/vue_mr_widget/extentions/code_quality/index_spec.js
+++ b/spec/frontend/vue_merge_request_widget/extentions/code_quality/index_spec.js
diff --git a/spec/frontend/vue_mr_widget/extentions/code_quality/mock_data.js b/spec/frontend/vue_merge_request_widget/extentions/code_quality/mock_data.js
index f5ad0ce7377..f5ad0ce7377 100644
--- a/spec/frontend/vue_mr_widget/extentions/code_quality/mock_data.js
+++ b/spec/frontend/vue_merge_request_widget/extentions/code_quality/mock_data.js
diff --git a/spec/frontend/vue_mr_widget/extentions/terraform/index_spec.js b/spec/frontend/vue_merge_request_widget/extentions/terraform/index_spec.js
index d9faa7b2d25..d9faa7b2d25 100644
--- a/spec/frontend/vue_mr_widget/extentions/terraform/index_spec.js
+++ b/spec/frontend/vue_merge_request_widget/extentions/terraform/index_spec.js
diff --git a/spec/frontend/vue_mr_widget/mock_data.js b/spec/frontend/vue_merge_request_widget/mock_data.js
index 20d00a116bb..20d00a116bb 100644
--- a/spec/frontend/vue_mr_widget/mock_data.js
+++ b/spec/frontend/vue_merge_request_widget/mock_data.js
diff --git a/spec/frontend/vue_mr_widget/mr_widget_how_to_merge_modal_spec.js b/spec/frontend/vue_merge_request_widget/mr_widget_how_to_merge_modal_spec.js
index 295b9df30b9..295b9df30b9 100644
--- a/spec/frontend/vue_mr_widget/mr_widget_how_to_merge_modal_spec.js
+++ b/spec/frontend/vue_merge_request_widget/mr_widget_how_to_merge_modal_spec.js
diff --git a/spec/frontend/vue_mr_widget/mr_widget_options_spec.js b/spec/frontend/vue_merge_request_widget/mr_widget_options_spec.js
index b3af5eba364..819841317f9 100644
--- a/spec/frontend/vue_mr_widget/mr_widget_options_spec.js
+++ b/spec/frontend/vue_merge_request_widget/mr_widget_options_spec.js
@@ -20,7 +20,6 @@ import {
import { SUCCESS } from '~/vue_merge_request_widget/components/deployment/constants';
import eventHub from '~/vue_merge_request_widget/event_hub';
import MrWidgetOptions from '~/vue_merge_request_widget/mr_widget_options.vue';
-import { stateKey } from '~/vue_merge_request_widget/stores/state_maps';
import StatusIcon from '~/vue_merge_request_widget/components/extensions/status_icon.vue';
import securityReportMergeRequestDownloadPathsQuery from '~/vue_shared/security_reports/graphql/queries/security_report_merge_request_download_paths.query.graphql';
import { faviconDataUrl, overlayDataUrl } from '../lib/utils/mock_data';
@@ -125,25 +124,13 @@ describe('MrWidgetOptions', () => {
it('should return true when hasCI is true', () => {
wrapper.vm.mr.hasCI = true;
- expect(wrapper.vm.shouldRenderPipelines).toBeTruthy();
+ expect(wrapper.vm.shouldRenderPipelines).toBe(true);
});
it('should return false when hasCI is false', () => {
wrapper.vm.mr.hasCI = false;
- expect(wrapper.vm.shouldRenderPipelines).toBeFalsy();
- });
- });
-
- describe('shouldRenderRelatedLinks', () => {
- it('should return false for the initial data', () => {
- expect(wrapper.vm.shouldRenderRelatedLinks).toBeFalsy();
- });
-
- it('should return true if there is relatedLinks in MR', () => {
- Vue.set(wrapper.vm.mr, 'relatedLinks', {});
-
- expect(wrapper.vm.shouldRenderRelatedLinks).toBeTruthy();
+ expect(wrapper.vm.shouldRenderPipelines).toBe(false);
});
});
@@ -316,7 +303,7 @@ describe('MrWidgetOptions', () => {
expect(wrapper.vm.service.checkStatus).toHaveBeenCalled();
expect(wrapper.vm.mr.setData).toHaveBeenCalled();
expect(wrapper.vm.handleNotification).toHaveBeenCalledWith(mockData);
- expect(isCbExecuted).toBeTruthy();
+ expect(isCbExecuted).toBe(true);
});
});
});
@@ -519,61 +506,6 @@ describe('MrWidgetOptions', () => {
});
});
- describe('rendering relatedLinks', () => {
- beforeEach(() => {
- return createComponent({
- ...mockData,
- issues_links: {
- closing: `
- <a class="close-related-link" href="#">
- Close
- </a>
- `,
- },
- });
- });
-
- afterEach(() => {
- wrapper.destroy();
- });
-
- it('renders if there are relatedLinks', () => {
- expect(wrapper.find('.close-related-link').exists()).toBe(true);
- });
-
- it('does not render if state is nothingToMerge', async () => {
- wrapper.vm.mr.state = stateKey.nothingToMerge;
- await nextTick();
- expect(wrapper.find('.close-related-link').exists()).toBe(false);
- });
- });
-
- describe('rendering source branch removal status', () => {
- it('renders when user cannot remove branch and branch should be removed', async () => {
- wrapper.vm.mr.canRemoveSourceBranch = false;
- wrapper.vm.mr.shouldRemoveSourceBranch = true;
- wrapper.vm.mr.state = 'readyToMerge';
-
- await nextTick();
- const tooltip = wrapper.find('[data-testid="question-o-icon"]');
-
- expect(wrapper.text()).toContain('Deletes the source branch');
- expect(tooltip.attributes('title')).toBe(
- 'A user with write access to the source branch selected this option',
- );
- });
-
- it('does not render in merged state', async () => {
- wrapper.vm.mr.canRemoveSourceBranch = false;
- wrapper.vm.mr.shouldRemoveSourceBranch = true;
- wrapper.vm.mr.state = 'merged';
-
- await nextTick();
- expect(wrapper.text()).toContain('The source branch has been deleted');
- expect(wrapper.text()).not.toContain('Deletes the source branch');
- });
- });
-
describe('rendering deployments', () => {
const changes = [
{
@@ -1062,7 +994,7 @@ describe('MrWidgetOptions', () => {
await createComponent();
- expect(pollRequest).toHaveBeenCalledTimes(6);
+ expect(pollRequest).toHaveBeenCalledTimes(4);
});
});
@@ -1100,14 +1032,14 @@ describe('MrWidgetOptions', () => {
registerExtension(pollingErrorExtension);
await createComponent();
- expect(pollRequest).toHaveBeenCalledTimes(6);
+ expect(pollRequest).toHaveBeenCalledTimes(4);
});
it('captures sentry error and displays error when poll has failed', async () => {
registerExtension(pollingErrorExtension);
await createComponent();
- expect(Sentry.captureException).toHaveBeenCalledTimes(5);
+ expect(Sentry.captureException).toHaveBeenCalled();
expect(Sentry.captureException).toHaveBeenCalledWith(new Error('Fetch error'));
expect(wrapper.findComponent(StatusIcon).props('iconName')).toBe('failed');
});
@@ -1126,7 +1058,7 @@ describe('MrWidgetOptions', () => {
expect(
wrapper.find('[data-testid="widget-extension"] [data-testid="toggle-button"]').exists(),
).toBe(false);
- expect(Sentry.captureException).toHaveBeenCalledTimes(5);
+ expect(Sentry.captureException).toHaveBeenCalled();
expect(Sentry.captureException).toHaveBeenCalledWith(new Error('Fetch error'));
expect(wrapper.findComponent(StatusIcon).props('iconName')).toBe('failed');
});
diff --git a/spec/frontend/vue_mr_widget/stores/artifacts_list/actions_spec.js b/spec/frontend/vue_merge_request_widget/stores/artifacts_list/actions_spec.js
index 22562bb4ddb..22562bb4ddb 100644
--- a/spec/frontend/vue_mr_widget/stores/artifacts_list/actions_spec.js
+++ b/spec/frontend/vue_merge_request_widget/stores/artifacts_list/actions_spec.js
diff --git a/spec/frontend/vue_mr_widget/stores/artifacts_list/getters_spec.js b/spec/frontend/vue_merge_request_widget/stores/artifacts_list/getters_spec.js
index dc90fef63c6..dc90fef63c6 100644
--- a/spec/frontend/vue_mr_widget/stores/artifacts_list/getters_spec.js
+++ b/spec/frontend/vue_merge_request_widget/stores/artifacts_list/getters_spec.js
diff --git a/spec/frontend/vue_mr_widget/stores/artifacts_list/mutations_spec.js b/spec/frontend/vue_merge_request_widget/stores/artifacts_list/mutations_spec.js
index a4e6788c7f6..a4e6788c7f6 100644
--- a/spec/frontend/vue_mr_widget/stores/artifacts_list/mutations_spec.js
+++ b/spec/frontend/vue_merge_request_widget/stores/artifacts_list/mutations_spec.js
diff --git a/spec/frontend/vue_mr_widget/stores/get_state_key_spec.js b/spec/frontend/vue_merge_request_widget/stores/get_state_key_spec.js
index fc760f5c5be..0246a8d4b0f 100644
--- a/spec/frontend/vue_mr_widget/stores/get_state_key_spec.js
+++ b/spec/frontend/vue_merge_request_widget/stores/get_state_key_spec.js
@@ -25,10 +25,6 @@ describe('getStateKey', () => {
expect(bound()).toEqual('readyToMerge');
- context.canMerge = false;
-
- expect(bound()).toEqual('notAllowedToMerge');
-
context.autoMergeEnabled = true;
context.hasMergeableDiscussionsState = true;
@@ -105,22 +101,4 @@ describe('getStateKey', () => {
expect(bound()).toEqual('rebase');
});
-
- it.each`
- canMerge | isSHAMismatch | stateKey
- ${true} | ${true} | ${'shaMismatch'}
- ${false} | ${true} | ${'notAllowedToMerge'}
- ${false} | ${false} | ${'notAllowedToMerge'}
- `(
- 'returns $stateKey when canMerge is $canMerge and isSHAMismatch is $isSHAMismatch',
- ({ canMerge, isSHAMismatch, stateKey }) => {
- const bound = getStateKey.bind({
- canMerge,
- isSHAMismatch,
- commitsCount: 2,
- });
-
- expect(bound()).toEqual(stateKey);
- },
- );
});
diff --git a/spec/frontend/vue_mr_widget/stores/mr_widget_store_spec.js b/spec/frontend/vue_merge_request_widget/stores/mr_widget_store_spec.js
index 3cdb4265ef0..3cdb4265ef0 100644
--- a/spec/frontend/vue_mr_widget/stores/mr_widget_store_spec.js
+++ b/spec/frontend/vue_merge_request_widget/stores/mr_widget_store_spec.js
diff --git a/spec/frontend/vue_mr_widget/test_extensions.js b/spec/frontend/vue_merge_request_widget/test_extensions.js
index 1977f550577..1977f550577 100644
--- a/spec/frontend/vue_mr_widget/test_extensions.js
+++ b/spec/frontend/vue_merge_request_widget/test_extensions.js
diff --git a/spec/frontend/vue_mr_widget/components/states/__snapshots__/mr_widget_auto_merge_enabled_spec.js.snap b/spec/frontend/vue_mr_widget/components/states/__snapshots__/mr_widget_auto_merge_enabled_spec.js.snap
deleted file mode 100644
index 56a0218b374..00000000000
--- a/spec/frontend/vue_mr_widget/components/states/__snapshots__/mr_widget_auto_merge_enabled_spec.js.snap
+++ /dev/null
@@ -1,145 +0,0 @@
-// Jest Snapshot v1, https://goo.gl/fbAQLP
-
-exports[`MRWidgetAutoMergeEnabled when graphql is disabled template should have correct elements 1`] = `
-<div
- class="mr-widget-body media"
->
- <gl-icon-stub
- class="gl-text-blue-500 gl-mr-3 gl-mt-1"
- name="status_scheduled"
- size="24"
- />
-
- <div
- class="media-body"
- >
- <h4
- class="gl-display-flex"
- >
- <span
- class="gl-mr-3"
- >
- <gl-sprintf-stub
- data-testid="statusText"
- message="Set by %{merge_author} to be merged automatically when the pipeline succeeds"
- />
- </span>
-
- <gl-button-stub
- buttontextclasses=""
- category="primary"
- class="js-cancel-auto-merge"
- data-qa-selector="cancel_auto_merge_button"
- data-testid="cancelAutomaticMergeButton"
- icon=""
- size="small"
- variant="default"
- >
-
- Cancel auto-merge
-
- </gl-button-stub>
- </h4>
-
- <section
- class="mr-info-list"
- >
- <p
- class="gl-display-flex"
- >
- <span
- class="gl-mr-3"
- >
- Does not delete the source branch
- </span>
-
- <gl-button-stub
- buttontextclasses=""
- category="primary"
- class="js-remove-source-branch"
- data-testid="removeSourceBranchButton"
- icon=""
- size="small"
- variant="default"
- >
-
- Delete source branch
-
- </gl-button-stub>
- </p>
- </section>
- </div>
-</div>
-`;
-
-exports[`MRWidgetAutoMergeEnabled when graphql is enabled template should have correct elements 1`] = `
-<div
- class="mr-widget-body media"
->
- <gl-icon-stub
- class="gl-text-blue-500 gl-mr-3 gl-mt-1"
- name="status_scheduled"
- size="24"
- />
-
- <div
- class="media-body"
- >
- <h4
- class="gl-display-flex"
- >
- <span
- class="gl-mr-3"
- >
- <gl-sprintf-stub
- data-testid="statusText"
- message="Set by %{merge_author} to be merged automatically when the pipeline succeeds"
- />
- </span>
-
- <gl-button-stub
- buttontextclasses=""
- category="primary"
- class="js-cancel-auto-merge"
- data-qa-selector="cancel_auto_merge_button"
- data-testid="cancelAutomaticMergeButton"
- icon=""
- size="small"
- variant="default"
- >
-
- Cancel auto-merge
-
- </gl-button-stub>
- </h4>
-
- <section
- class="mr-info-list"
- >
- <p
- class="gl-display-flex"
- >
- <span
- class="gl-mr-3"
- >
- Does not delete the source branch
- </span>
-
- <gl-button-stub
- buttontextclasses=""
- category="primary"
- class="js-remove-source-branch"
- data-testid="removeSourceBranchButton"
- icon=""
- size="small"
- variant="default"
- >
-
- Delete source branch
-
- </gl-button-stub>
- </p>
- </section>
- </div>
-</div>
-`;
diff --git a/spec/frontend/vue_shared/alert_details/alert_details_spec.js b/spec/frontend/vue_shared/alert_details/alert_details_spec.js
index ce51af31a70..59e21b2ff40 100644
--- a/spec/frontend/vue_shared/alert_details/alert_details_spec.js
+++ b/spec/frontend/vue_shared/alert_details/alert_details_spec.js
@@ -285,14 +285,14 @@ describe('AlertDetails', () => {
});
it('displays a loading state when loading', () => {
- expect(wrapper.find(GlLoadingIcon).exists()).toBe(true);
+ expect(wrapper.findComponent(GlLoadingIcon).exists()).toBe(true);
});
});
describe('error state', () => {
    it('displays an error state correctly', () => {
mountComponent({ data: { errored: true } });
- expect(wrapper.find(GlAlert).exists()).toBe(true);
+ expect(wrapper.findComponent(GlAlert).exists()).toBe(true);
});
it('renders html-errors correctly', () => {
@@ -304,7 +304,7 @@ describe('AlertDetails', () => {
it('does not display an error when dismissed', () => {
mountComponent({ data: { errored: true, isErrorDismissed: true } });
- expect(wrapper.find(GlAlert).exists()).toBe(false);
+ expect(wrapper.findComponent(GlAlert).exists()).toBe(false);
});
});
diff --git a/spec/frontend/vue_shared/alert_details/alert_metrics_spec.js b/spec/frontend/vue_shared/alert_details/alert_metrics_spec.js
index 1216681038f..cf04c1eb24a 100644
--- a/spec/frontend/vue_shared/alert_details/alert_metrics_spec.js
+++ b/spec/frontend/vue_shared/alert_details/alert_metrics_spec.js
@@ -28,8 +28,8 @@ describe('Alert Metrics', () => {
});
}
- const findChart = () => wrapper.find(MetricEmbed);
- const findEmptyState = () => wrapper.find({ ref: 'emptyState' });
+ const findChart = () => wrapper.findComponent(MetricEmbed);
+ const findEmptyState = () => wrapper.findComponent({ ref: 'emptyState' });
afterEach(() => {
if (wrapper) {
diff --git a/spec/frontend/vue_shared/alert_details/alert_status_spec.js b/spec/frontend/vue_shared/alert_details/alert_status_spec.js
index ba3b0335a8e..2a37ff2b784 100644
--- a/spec/frontend/vue_shared/alert_details/alert_status_spec.js
+++ b/spec/frontend/vue_shared/alert_details/alert_status_spec.js
@@ -13,7 +13,7 @@ describe('AlertManagementStatus', () => {
let wrapper;
const findStatusDropdown = () => wrapper.findComponent(GlDropdown);
const findFirstStatusOption = () => findStatusDropdown().findComponent(GlDropdownItem);
- const findAllStatusOptions = () => findStatusDropdown().findAll(GlDropdownItem);
+ const findAllStatusOptions = () => findStatusDropdown().findAllComponents(GlDropdownItem);
const findStatusDropdownHeader = () => wrapper.findByTestId('dropdown-header');
const selectFirstStatusOption = () => {
diff --git a/spec/frontend/vue_shared/alert_details/sidebar/alert_sidebar_assignees_spec.js b/spec/frontend/vue_shared/alert_details/sidebar/alert_sidebar_assignees_spec.js
index 29569734621..5a0ee5a59ba 100644
--- a/spec/frontend/vue_shared/alert_details/sidebar/alert_sidebar_assignees_spec.js
+++ b/spec/frontend/vue_shared/alert_details/sidebar/alert_sidebar_assignees_spec.js
@@ -128,7 +128,7 @@ describe('Alert Details Sidebar Assignees', () => {
wrapper.setData({ isDropdownSearching: false });
await nextTick();
- wrapper.find(SidebarAssignee).vm.$emit('update-alert-assignees', 'root');
+ wrapper.findComponent(SidebarAssignee).vm.$emit('update-alert-assignees', 'root');
expect(wrapper.vm.$apollo.mutate).toHaveBeenCalledWith({
mutation: AlertSetAssignees,
@@ -156,7 +156,7 @@ describe('Alert Details Sidebar Assignees', () => {
jest.spyOn(wrapper.vm.$apollo, 'mutate').mockResolvedValue(errorMutationResult);
await nextTick();
- const SideBarAssigneeItem = wrapper.findAll(SidebarAssignee).at(0);
+ const SideBarAssigneeItem = wrapper.findAllComponents(SidebarAssignee).at(0);
await SideBarAssigneeItem.vm.$emit('update-alert-assignees');
expect(wrapper.emitted('alert-error')).toBeDefined();
});
diff --git a/spec/frontend/vue_shared/alert_details/sidebar/alert_sidebar_spec.js b/spec/frontend/vue_shared/alert_details/sidebar/alert_sidebar_spec.js
index ef75e038bff..3b38349622f 100644
--- a/spec/frontend/vue_shared/alert_details/sidebar/alert_sidebar_spec.js
+++ b/spec/frontend/vue_shared/alert_details/sidebar/alert_sidebar_spec.js
@@ -65,7 +65,7 @@ describe('Alert Details Sidebar', () => {
mountMethod: mount,
alert: mockAlert,
});
- expect(wrapper.find(SidebarAssignees).exists()).toBe(true);
+ expect(wrapper.findComponent(SidebarAssignees).exists()).toBe(true);
});
it('should render side bar status dropdown', () => {
@@ -73,7 +73,7 @@ describe('Alert Details Sidebar', () => {
mountMethod: mount,
alert: mockAlert,
});
- expect(wrapper.find(SidebarStatus).exists()).toBe(true);
+ expect(wrapper.findComponent(SidebarStatus).exists()).toBe(true);
});
});
});
diff --git a/spec/frontend/vue_shared/alert_details/system_notes/alert_management_system_note_spec.js b/spec/frontend/vue_shared/alert_details/system_notes/alert_management_system_note_spec.js
index a5a9fb55737..6a750bb99c0 100644
--- a/spec/frontend/vue_shared/alert_details/system_notes/alert_management_system_note_spec.js
+++ b/spec/frontend/vue_shared/alert_details/system_notes/alert_management_system_note_spec.js
@@ -31,7 +31,7 @@ describe('Alert Details System Note', () => {
it('renders the correct system note', () => {
const noteId = wrapper.find('.note-wrapper').attributes('id');
- const iconName = wrapper.find(GlIcon).attributes('name');
+ const iconName = wrapper.findComponent(GlIcon).attributes('name');
expect(noteId).toBe('note_1628');
expect(iconName).toBe(mockAlert.notes.nodes[0].systemNoteIconName);
diff --git a/spec/frontend/vue_shared/components/actions_button_spec.js b/spec/frontend/vue_shared/components/actions_button_spec.js
index e5b7b693cb5..07c53c04723 100644
--- a/spec/frontend/vue_shared/components/actions_button_spec.js
+++ b/spec/frontend/vue_shared/components/actions_button_spec.js
@@ -45,9 +45,9 @@ describe('Actions button component', () => {
return directiveBinding.value;
};
- const findButton = () => wrapper.find(GlButton);
+ const findButton = () => wrapper.findComponent(GlButton);
const findButtonTooltip = () => getTooltip(findButton());
- const findDropdown = () => wrapper.find(GlDropdown);
+ const findDropdown = () => wrapper.findComponent(GlDropdown);
const findDropdownTooltip = () => getTooltip(findDropdown());
const parseDropdownItems = () =>
findDropdown()
diff --git a/spec/frontend/vue_shared/components/alert_details_table_spec.js b/spec/frontend/vue_shared/components/alert_details_table_spec.js
index b9a8a5bee97..8a9ee4699bd 100644
--- a/spec/frontend/vue_shared/components/alert_details_table_spec.js
+++ b/spec/frontend/vue_shared/components/alert_details_table_spec.js
@@ -74,7 +74,7 @@ describe('AlertDetails', () => {
});
it('displays a loading state when loading', () => {
- expect(wrapper.find(GlLoadingIcon).exists()).toBe(true);
+ expect(wrapper.findComponent(GlLoadingIcon).exists()).toBe(true);
});
});
@@ -130,7 +130,7 @@ describe('AlertDetails', () => {
environmentData = { name: null, path: null };
mountComponent();
- expect(findTableFieldValueByKey('Environment').text()).toBeFalsy();
+ expect(findTableFieldValueByKey('Environment').text()).toBe('');
});
});
diff --git a/spec/frontend/vue_shared/components/blob_viewers/rich_viewer_spec.js b/spec/frontend/vue_shared/components/blob_viewers/rich_viewer_spec.js
index d14f3e5559f..ce7fd40937f 100644
--- a/spec/frontend/vue_shared/components/blob_viewers/rich_viewer_spec.js
+++ b/spec/frontend/vue_shared/components/blob_viewers/rich_viewer_spec.js
@@ -43,6 +43,6 @@ describe('Blob Rich Viewer component', () => {
});
it('is using Markdown View Field', () => {
- expect(wrapper.find(MarkdownFieldView).exists()).toBe(true);
+ expect(wrapper.findComponent(MarkdownFieldView).exists()).toBe(true);
});
});
diff --git a/spec/frontend/vue_shared/components/changed_file_icon_spec.js b/spec/frontend/vue_shared/components/changed_file_icon_spec.js
index 6b9658a6d18..ea708b6f3fe 100644
--- a/spec/frontend/vue_shared/components/changed_file_icon_spec.js
+++ b/spec/frontend/vue_shared/components/changed_file_icon_spec.js
@@ -25,7 +25,7 @@ describe('Changed file icon', () => {
wrapper.destroy();
});
- const findIcon = () => wrapper.find(GlIcon);
+ const findIcon = () => wrapper.findComponent(GlIcon);
const findIconName = () => findIcon().props('name');
const findIconClasses = () => findIcon().classes();
const findTooltipText = () => wrapper.attributes('title');
@@ -51,7 +51,7 @@ describe('Changed file icon', () => {
showTooltip: false,
});
- expect(findTooltipText()).toBeFalsy();
+ expect(findTooltipText()).toBeUndefined();
});
describe.each`
@@ -87,7 +87,7 @@ describe('Changed file icon', () => {
});
it('does not have tooltip text', () => {
- expect(findTooltipText()).toBeFalsy();
+ expect(findTooltipText()).toBeUndefined();
});
});
diff --git a/spec/frontend/vue_shared/components/ci_icon_spec.js b/spec/frontend/vue_shared/components/ci_icon_spec.js
index 1b502f9587c..2064bee9673 100644
--- a/spec/frontend/vue_shared/components/ci_icon_spec.js
+++ b/spec/frontend/vue_shared/components/ci_icon_spec.js
@@ -22,7 +22,7 @@ describe('CI Icon component', () => {
});
expect(wrapper.find('span').exists()).toBe(true);
- expect(wrapper.find(GlIcon).exists()).toBe(true);
+ expect(wrapper.findComponent(GlIcon).exists()).toBe(true);
});
describe('active icons', () => {
diff --git a/spec/frontend/vue_shared/components/clipboard_button_spec.js b/spec/frontend/vue_shared/components/clipboard_button_spec.js
index fca5e664a96..b18b00e70bb 100644
--- a/spec/frontend/vue_shared/components/clipboard_button_spec.js
+++ b/spec/frontend/vue_shared/components/clipboard_button_spec.js
@@ -21,7 +21,7 @@ describe('clipboard button', () => {
});
};
- const findButton = () => wrapper.find(GlButton);
+ const findButton = () => wrapper.findComponent(GlButton);
const expectConfirmationTooltip = async ({ event, message }) => {
const title = 'Copy this value';
diff --git a/spec/frontend/vue_shared/components/clone_dropdown_spec.js b/spec/frontend/vue_shared/components/clone_dropdown_spec.js
index eefd1838988..31c08260dd0 100644
--- a/spec/frontend/vue_shared/components/clone_dropdown_spec.js
+++ b/spec/frontend/vue_shared/components/clone_dropdown_spec.js
@@ -38,9 +38,9 @@ describe('Clone Dropdown Button', () => {
${'HTTP'} | ${1} | ${httpLink}
`('renders correct link and a copy-button for $name', ({ index, value }) => {
createComponent();
- const group = wrapper.findAll(GlFormInputGroup).at(index);
+ const group = wrapper.findAllComponents(GlFormInputGroup).at(index);
expect(group.props('value')).toBe(value);
- expect(group.find(GlFormInputGroup).exists()).toBe(true);
+ expect(group.findComponent(GlFormInputGroup).exists()).toBe(true);
});
it.each`
@@ -50,8 +50,8 @@ describe('Clone Dropdown Button', () => {
`('does not fail if only $name is set', ({ name, value }) => {
createComponent({ [name]: value });
- expect(wrapper.find(GlFormInputGroup).props('value')).toBe(value);
- expect(wrapper.findAll(GlDropdownSectionHeader).length).toBe(1);
+ expect(wrapper.findComponent(GlFormInputGroup).props('value')).toBe(value);
+ expect(wrapper.findAllComponents(GlDropdownSectionHeader).length).toBe(1);
});
});
@@ -63,12 +63,12 @@ describe('Clone Dropdown Button', () => {
`('allows null values for the props', ({ name, value }) => {
createComponent({ ...defaultPropsData, [name]: value });
- expect(wrapper.findAll(GlDropdownSectionHeader).length).toBe(1);
+ expect(wrapper.findAllComponents(GlDropdownSectionHeader).length).toBe(1);
});
it('correctly calculates httpLabel for HTTPS protocol', () => {
createComponent({ httpLink: httpsLink });
- expect(wrapper.find(GlDropdownSectionHeader).text()).toContain('HTTPS');
+ expect(wrapper.findComponent(GlDropdownSectionHeader).text()).toContain('HTTPS');
});
});
});
diff --git a/spec/frontend/vue_shared/components/color_picker/color_picker_spec.js b/spec/frontend/vue_shared/components/color_picker/color_picker_spec.js
index 8cbe0630426..060048c4bbd 100644
--- a/spec/frontend/vue_shared/components/color_picker/color_picker_spec.js
+++ b/spec/frontend/vue_shared/components/color_picker/color_picker_spec.js
@@ -16,14 +16,14 @@ describe('ColorPicker', () => {
const setColor = '#000000';
const invalidText = 'Please enter a valid hex (#RRGGBB or #RGB) color value';
- const findGlFormGroup = () => wrapper.find(GlFormGroup);
+ const findGlFormGroup = () => wrapper.findComponent(GlFormGroup);
const colorPreview = () => wrapper.find('[data-testid="color-preview"]');
- const colorPicker = () => wrapper.find(GlFormInput);
+ const colorPicker = () => wrapper.findComponent(GlFormInput);
const colorInput = () => wrapper.find('input[type="color"]');
- const colorTextInput = () => wrapper.find(GlFormInputGroup).find('input[type="text"]');
+ const colorTextInput = () => wrapper.findComponent(GlFormInputGroup).find('input[type="text"]');
const invalidFeedback = () => wrapper.find('.invalid-feedback');
- const description = () => wrapper.find(GlFormGroup).attributes('description');
- const presetColors = () => wrapper.findAll(GlLink);
+ const description = () => wrapper.findComponent(GlFormGroup).attributes('description');
+ const presetColors = () => wrapper.findAllComponents(GlLink);
beforeEach(() => {
gon.suggested_label_colors = {
diff --git a/spec/frontend/vue_shared/components/commit_spec.js b/spec/frontend/vue_shared/components/commit_spec.js
index d91853e7b79..1893e127f6f 100644
--- a/spec/frontend/vue_shared/components/commit_spec.js
+++ b/spec/frontend/vue_shared/components/commit_spec.js
@@ -9,11 +9,11 @@ describe('Commit component', () => {
let wrapper;
const findIcon = (name) => {
- const icons = wrapper.findAll(GlIcon).filter((c) => c.attributes('name') === name);
+ const icons = wrapper.findAllComponents(GlIcon).filter((c) => c.attributes('name') === name);
return icons.length ? icons.at(0) : icons;
};
- const findUserAvatar = () => wrapper.find(UserAvatarLink);
+ const findUserAvatar = () => wrapper.findComponent(UserAvatarLink);
const findRefName = () => wrapper.findByTestId('ref-name');
const createComponent = (propsData) => {
@@ -47,7 +47,7 @@ describe('Commit component', () => {
},
});
- expect(wrapper.find('.icon-container').find(GlIcon).exists()).toBe(true);
+ expect(wrapper.find('.icon-container').findComponent(GlIcon).exists()).toBe(true);
});
describe('Given all the props', () => {
diff --git a/spec/frontend/vue_shared/components/confirm_modal_spec.js b/spec/frontend/vue_shared/components/confirm_modal_spec.js
index 3ca1c943398..c1e682a1aae 100644
--- a/spec/frontend/vue_shared/components/confirm_modal_spec.js
+++ b/spec/frontend/vue_shared/components/confirm_modal_spec.js
@@ -51,13 +51,13 @@ describe('vue_shared/components/confirm_modal', () => {
wrapper.destroy();
});
- const findModal = () => wrapper.find(GlModalStub);
+ const findModal = () => wrapper.findComponent(GlModalStub);
const findForm = () => wrapper.find('form');
const findFormData = () =>
findForm()
.findAll('input')
.wrappers.map((x) => ({ name: x.attributes('name'), value: x.attributes('value') }));
- const findDomElementListener = () => wrapper.find(DomElementListener);
+ const findDomElementListener = () => wrapper.findComponent(DomElementListener);
const triggerOpenWithEventHub = (modalData) => {
eventHub.$emit(EVENT_OPEN_CONFIRM_MODAL, modalData);
};
@@ -104,7 +104,7 @@ describe('vue_shared/components/confirm_modal', () => {
});
it('renders GlModal with data', () => {
- expect(findModal().exists()).toBeTruthy();
+ expect(findModal().exists()).toBe(true);
expect(findModal().attributes()).toEqual(
expect.objectContaining({
oktitle: MOCK_MODAL_DATA.modalAttributes.okTitle,
diff --git a/spec/frontend/vue_shared/components/dismissible_alert_spec.js b/spec/frontend/vue_shared/components/dismissible_alert_spec.js
index 879d4aba441..8b1189f25d5 100644
--- a/spec/frontend/vue_shared/components/dismissible_alert_spec.js
+++ b/spec/frontend/vue_shared/components/dismissible_alert_spec.js
@@ -20,7 +20,7 @@ describe('vue_shared/components/dismissible_alert', () => {
wrapper.destroy();
});
- const findAlert = () => wrapper.find(GlAlert);
+ const findAlert = () => wrapper.findComponent(GlAlert);
describe('default', () => {
beforeEach(() => {
@@ -45,7 +45,7 @@ describe('vue_shared/components/dismissible_alert', () => {
});
    it('emits alertDismissed', () => {
- expect(wrapper.emitted('alertDismissed')).toBeTruthy();
+ expect(wrapper.emitted()).toHaveProperty('alertDismissed');
});
});
});
diff --git a/spec/frontend/vue_shared/components/dismissible_container_spec.js b/spec/frontend/vue_shared/components/dismissible_container_spec.js
index b8aeea38e77..f7030f38709 100644
--- a/spec/frontend/vue_shared/components/dismissible_container_spec.js
+++ b/spec/frontend/vue_shared/components/dismissible_container_spec.js
@@ -33,7 +33,7 @@ describe('DismissibleContainer', () => {
button.trigger('click');
- expect(wrapper.emitted().dismiss).toBeTruthy();
+ expect(wrapper.emitted().dismiss).toEqual(expect.any(Array));
});
});
diff --git a/spec/frontend/vue_shared/components/dropdown/dropdown_button_spec.js b/spec/frontend/vue_shared/components/dropdown/dropdown_button_spec.js
index 08e5d828b8f..e34ed31b4bf 100644
--- a/spec/frontend/vue_shared/components/dropdown/dropdown_button_spec.js
+++ b/spec/frontend/vue_shared/components/dropdown/dropdown_button_spec.js
@@ -1,80 +1,71 @@
-import Vue from 'vue';
+import { mount } from '@vue/test-utils';
+import DropdownButton from '~/vue_shared/components/dropdown/dropdown_button.vue';
-import { mountComponentWithSlots } from 'helpers/vue_mount_component_helper';
-import dropdownButtonComponent from '~/vue_shared/components/dropdown/dropdown_button.vue';
+describe('DropdownButton component', () => {
+ let wrapper;
-const defaultLabel = 'Select';
-const customLabel = 'Select project';
+ const defaultLabel = 'Select';
+ const customLabel = 'Select project';
-const createComponent = (props, slots = {}) => {
- const Component = Vue.extend(dropdownButtonComponent);
-
- return mountComponentWithSlots(Component, { props, slots });
-};
-
-describe('DropdownButtonComponent', () => {
- let vm;
-
- beforeEach(() => {
- vm = createComponent();
- });
+ const createComponent = (props, slots = {}) => {
+ wrapper = mount(DropdownButton, { propsData: props, slots });
+ };
afterEach(() => {
- vm.$destroy();
+ wrapper.destroy();
});
describe('computed', () => {
describe('dropdownToggleText', () => {
it('returns default toggle text', () => {
- expect(vm.toggleText).toBe(defaultLabel);
+ createComponent();
+
+ expect(wrapper.vm.toggleText).toBe(defaultLabel);
});
it('returns custom toggle text when provided via props', () => {
- const vmEmptyLabels = createComponent({ toggleText: customLabel });
+ createComponent({ toggleText: customLabel });
- expect(vmEmptyLabels.toggleText).toBe(customLabel);
- vmEmptyLabels.$destroy();
+ expect(wrapper.vm.toggleText).toBe(customLabel);
});
});
});
describe('template', () => {
it('renders component container element of type `button`', () => {
- expect(vm.$el.nodeName).toBe('BUTTON');
+ createComponent();
+
+ expect(wrapper.element.nodeName).toBe('BUTTON');
});
it('renders component container element with required data attributes', () => {
- expect(vm.$el.dataset.abilityName).toBe(vm.abilityName);
- expect(vm.$el.dataset.fieldName).toBe(vm.fieldName);
- expect(vm.$el.dataset.issueUpdate).toBe(vm.updatePath);
- expect(vm.$el.dataset.labels).toBe(vm.labelsPath);
- expect(vm.$el.dataset.namespacePath).toBe(vm.namespace);
- expect(vm.$el.dataset.showAny).not.toBeDefined();
+ createComponent();
+
+ expect(wrapper.element.dataset.abilityName).toBe(wrapper.vm.abilityName);
+ expect(wrapper.element.dataset.fieldName).toBe(wrapper.vm.fieldName);
+ expect(wrapper.element.dataset.issueUpdate).toBe(wrapper.vm.updatePath);
+ expect(wrapper.element.dataset.labels).toBe(wrapper.vm.labelsPath);
+ expect(wrapper.element.dataset.namespacePath).toBe(wrapper.vm.namespace);
+ expect(wrapper.element.dataset.showAny).toBeUndefined();
});
it('renders dropdown toggle text element', () => {
- const dropdownToggleTextEl = vm.$el.querySelector('.dropdown-toggle-text');
+ createComponent();
- expect(dropdownToggleTextEl).not.toBeNull();
- expect(dropdownToggleTextEl.innerText.trim()).toBe(defaultLabel);
+ expect(wrapper.find('.dropdown-toggle-text').text()).toBe(defaultLabel);
});
it('renders dropdown button icon', () => {
- const dropdownIconEl = vm.$el.querySelector('[data-testid="chevron-down-icon"]');
+ createComponent();
- expect(dropdownIconEl).not.toBeNull();
+ expect(wrapper.find('[data-testid="chevron-down-icon"]').exists()).toBe(true);
});
it('renders slot, if default slot exists', () => {
- vm = createComponent(
- {},
- {
- default: ['Lorem Ipsum Dolar'],
- },
- );
-
- expect(vm.$el.querySelector('.dropdown-toggle-text')).toBeNull();
- expect(vm.$el).toHaveText('Lorem Ipsum Dolar');
+ createComponent({}, { default: ['Lorem Ipsum Dolar'] });
+
+ expect(wrapper.find('.dropdown-toggle-text').exists()).toBe(false);
+ expect(wrapper.text()).toBe('Lorem Ipsum Dolar');
});
});
});
diff --git a/spec/frontend/vue_shared/components/dropdown/dropdown_widget_spec.js b/spec/frontend/vue_shared/components/dropdown/dropdown_widget_spec.js
index 084d0559665..dd3e55c82bb 100644
--- a/spec/frontend/vue_shared/components/dropdown/dropdown_widget_spec.js
+++ b/spec/frontend/vue_shared/components/dropdown/dropdown_widget_spec.js
@@ -8,7 +8,7 @@ describe('DropdownWidget component', () => {
let wrapper;
const findDropdown = () => wrapper.findComponent(GlDropdown);
- const findDropdownItems = () => wrapper.findAll(GlDropdownItem);
+ const findDropdownItems = () => wrapper.findAllComponents(GlDropdownItem);
const findSearch = () => wrapper.findComponent(GlSearchBoxByType);
const createComponent = ({ props = {} } = {}) => {
diff --git a/spec/frontend/vue_shared/components/expand_button_spec.js b/spec/frontend/vue_shared/components/expand_button_spec.js
index 87d6ed6b21f..170c947e520 100644
--- a/spec/frontend/vue_shared/components/expand_button_spec.js
+++ b/spec/frontend/vue_shared/components/expand_button_spec.js
@@ -37,11 +37,11 @@ describe('Expand button', () => {
});
it('renders no text when short text is not provided', () => {
- expect(wrapper.find(ExpandButton).text()).toBe('');
+ expect(wrapper.findComponent(ExpandButton).text()).toBe('');
});
it('does not render expanded text', () => {
- expect(wrapper.find(ExpandButton).text().trim()).not.toBe(text.short);
+ expect(wrapper.findComponent(ExpandButton).text().trim()).not.toBe(text.short);
});
describe('when short text is provided', () => {
@@ -55,13 +55,13 @@ describe('Expand button', () => {
});
it('renders short text', () => {
- expect(wrapper.find(ExpandButton).text().trim()).toBe(text.short);
+ expect(wrapper.findComponent(ExpandButton).text().trim()).toBe(text.short);
});
it('renders button before text', () => {
expect(expanderPrependEl().isVisible()).toBe(true);
expect(expanderAppendEl().isVisible()).toBe(false);
- expect(wrapper.find(ExpandButton).element).toMatchSnapshot();
+ expect(wrapper.findComponent(ExpandButton).element).toMatchSnapshot();
});
});
@@ -81,7 +81,7 @@ describe('Expand button', () => {
});
it('renders the expanded text', () => {
- expect(wrapper.find(ExpandButton).text()).toContain(text.expanded);
+ expect(wrapper.findComponent(ExpandButton).text()).toContain(text.expanded);
});
describe('when short text is provided', () => {
@@ -98,13 +98,13 @@ describe('Expand button', () => {
});
it('only renders expanded text', () => {
- expect(wrapper.find(ExpandButton).text().trim()).toBe(text.expanded);
+ expect(wrapper.findComponent(ExpandButton).text().trim()).toBe(text.expanded);
});
it('renders button after text', () => {
expect(expanderPrependEl().isVisible()).toBe(false);
expect(expanderAppendEl().isVisible()).toBe(true);
- expect(wrapper.find(ExpandButton).element).toMatchSnapshot();
+ expect(wrapper.findComponent(ExpandButton).element).toMatchSnapshot();
});
});
});
@@ -124,11 +124,11 @@ describe('Expand button', () => {
});
it('clicking hides expanded text', async () => {
- expect(wrapper.find(ExpandButton).text().trim()).toBe(text.expanded);
+ expect(wrapper.findComponent(ExpandButton).text().trim()).toBe(text.expanded);
expanderAppendEl().trigger('click');
await nextTick();
- expect(wrapper.find(ExpandButton).text().trim()).not.toBe(text.expanded);
+ expect(wrapper.findComponent(ExpandButton).text().trim()).not.toBe(text.expanded);
});
describe('when short text is provided', () => {
@@ -145,11 +145,11 @@ describe('Expand button', () => {
});
it('clicking reveals short text', async () => {
- expect(wrapper.find(ExpandButton).text().trim()).toBe(text.expanded);
+ expect(wrapper.findComponent(ExpandButton).text().trim()).toBe(text.expanded);
expanderAppendEl().trigger('click');
await nextTick();
- expect(wrapper.find(ExpandButton).text().trim()).toBe(text.short);
+ expect(wrapper.findComponent(ExpandButton).text().trim()).toBe(text.short);
});
});
});
diff --git a/spec/frontend/vue_shared/components/file_icon_spec.js b/spec/frontend/vue_shared/components/file_icon_spec.js
index b0e623520a8..3f4bfc86b67 100644
--- a/spec/frontend/vue_shared/components/file_icon_spec.js
+++ b/spec/frontend/vue_shared/components/file_icon_spec.js
@@ -6,7 +6,7 @@ import { FILE_SYMLINK_MODE } from '~/vue_shared/constants';
describe('File Icon component', () => {
let wrapper;
const findSvgIcon = () => wrapper.find('svg');
- const findGlIcon = () => wrapper.find(GlIcon);
+ const findGlIcon = () => wrapper.findComponent(GlIcon);
const getIconName = () =>
findSvgIcon()
.find('use')
@@ -61,7 +61,7 @@ describe('File Icon component', () => {
loading: true,
});
- expect(wrapper.find(GlLoadingIcon).exists()).toBe(true);
+ expect(wrapper.findComponent(GlLoadingIcon).exists()).toBe(true);
});
it('should add a special class and a size class', () => {
diff --git a/spec/frontend/vue_shared/components/file_row_spec.js b/spec/frontend/vue_shared/components/file_row_spec.js
index 62fb29c455c..f5a545891d5 100644
--- a/spec/frontend/vue_shared/components/file_row_spec.js
+++ b/spec/frontend/vue_shared/components/file_row_spec.js
@@ -119,7 +119,7 @@ describe('File row component', () => {
level: 0,
});
- expect(wrapper.find(FileHeader).exists()).toBe(true);
+ expect(wrapper.findComponent(FileHeader).exists()).toBe(true);
});
it('matches the current route against encoded file URL', () => {
@@ -164,6 +164,6 @@ describe('File row component', () => {
level: 0,
});
- expect(wrapper.find(FileIcon).props('submodule')).toBe(submodule);
+ expect(wrapper.findComponent(FileIcon).props('submodule')).toBe(submodule);
});
});
diff --git a/spec/frontend/vue_shared/components/file_tree_spec.js b/spec/frontend/vue_shared/components/file_tree_spec.js
index 39a7c7a2b3a..e8818e09dc0 100644
--- a/spec/frontend/vue_shared/components/file_tree_spec.js
+++ b/spec/frontend/vue_shared/components/file_tree_spec.js
@@ -25,8 +25,8 @@ describe('File Tree component', () => {
});
};
- const findFileRow = () => wrapper.find(MockFileRow);
- const findChildrenTrees = () => wrapper.findAll(FileTree).wrappers.slice(1);
+ const findFileRow = () => wrapper.findComponent(MockFileRow);
+ const findChildrenTrees = () => wrapper.findAllComponents(FileTree).wrappers.slice(1);
const findChildrenTreeProps = () =>
findChildrenTrees().map((x) => ({
...x.props(),
diff --git a/spec/frontend/vue_shared/components/filtered_search_bar/filtered_search_bar_root_spec.js b/spec/frontend/vue_shared/components/filtered_search_bar/filtered_search_bar_root_spec.js
index e44bc8771f5..1b9ca8e6092 100644
--- a/spec/frontend/vue_shared/components/filtered_search_bar/filtered_search_bar_root_spec.js
+++ b/spec/frontend/vue_shared/components/filtered_search_bar/filtered_search_bar_root_spec.js
@@ -88,10 +88,10 @@ describe('FilteredSearchBarRoot', () => {
expect(wrapper.vm.filterValue).toEqual([]);
expect(wrapper.vm.selectedSortOption).toBe(mockSortOptions[0]);
expect(wrapper.vm.selectedSortDirection).toBe(SortDirection.descending);
- expect(wrapper.find(GlButtonGroup).exists()).toBe(true);
- expect(wrapper.find(GlButton).exists()).toBe(true);
- expect(wrapper.find(GlDropdown).exists()).toBe(true);
- expect(wrapper.find(GlDropdownItem).exists()).toBe(true);
+ expect(wrapper.findComponent(GlButtonGroup).exists()).toBe(true);
+ expect(wrapper.findComponent(GlButton).exists()).toBe(true);
+ expect(wrapper.findComponent(GlDropdown).exists()).toBe(true);
+ expect(wrapper.findComponent(GlDropdownItem).exists()).toBe(true);
});
it('does not initialize `selectedSortOption` and `selectedSortDirection` when `sortOptions` is not applied and hides the sort dropdown', () => {
@@ -99,10 +99,10 @@ describe('FilteredSearchBarRoot', () => {
expect(wrapperNoSort.vm.filterValue).toEqual([]);
expect(wrapperNoSort.vm.selectedSortOption).toBe(undefined);
- expect(wrapperNoSort.find(GlButtonGroup).exists()).toBe(false);
- expect(wrapperNoSort.find(GlButton).exists()).toBe(false);
- expect(wrapperNoSort.find(GlDropdown).exists()).toBe(false);
- expect(wrapperNoSort.find(GlDropdownItem).exists()).toBe(false);
+ expect(wrapperNoSort.findComponent(GlButtonGroup).exists()).toBe(false);
+ expect(wrapperNoSort.findComponent(GlButton).exists()).toBe(false);
+ expect(wrapperNoSort.findComponent(GlDropdown).exists()).toBe(false);
+ expect(wrapperNoSort.findComponent(GlDropdownItem).exists()).toBe(false);
});
});
@@ -217,7 +217,7 @@ describe('FilteredSearchBarRoot', () => {
it('emits component event `onFilter` with empty array and true when initially selected filter value was cleared', async () => {
wrapper = createComponent({ initialFilterValue: [tokenValueLabel] });
- wrapper.find(GlFilteredSearch).vm.$emit('clear');
+ wrapper.findComponent(GlFilteredSearch).vm.$emit('clear');
await nextTick();
expect(wrapper.emitted('onFilter')[0]).toEqual([[], true]);
@@ -362,7 +362,7 @@ describe('FilteredSearchBarRoot', () => {
it('calls `blurSearchInput` method to remove focus from filter input field', () => {
jest.spyOn(wrapper.vm, 'blurSearchInput');
- wrapper.find(GlFilteredSearch).vm.$emit('submit', mockFilters);
+ wrapper.findComponent(GlFilteredSearch).vm.$emit('submit', mockFilters);
expect(wrapper.vm.blurSearchInput).toHaveBeenCalled();
});
@@ -392,7 +392,7 @@ describe('FilteredSearchBarRoot', () => {
});
it('renders gl-filtered-search component', () => {
- const glFilteredSearchEl = wrapper.find(GlFilteredSearch);
+ const glFilteredSearchEl = wrapper.findComponent(GlFilteredSearch);
expect(glFilteredSearchEl.props('placeholder')).toBe('Filter requirements');
expect(glFilteredSearchEl.props('availableTokens')).toEqual(mockAvailableTokens);
@@ -404,8 +404,10 @@ describe('FilteredSearchBarRoot', () => {
showCheckbox: true,
});
- expect(wrapperWithCheckbox.find(GlFormCheckbox).exists()).toBe(true);
- expect(wrapperWithCheckbox.find(GlFormCheckbox).attributes('checked')).not.toBeDefined();
+ expect(wrapperWithCheckbox.findComponent(GlFormCheckbox).exists()).toBe(true);
+ expect(
+ wrapperWithCheckbox.findComponent(GlFormCheckbox).attributes('checked'),
+ ).not.toBeDefined();
wrapperWithCheckbox.destroy();
@@ -414,7 +416,7 @@ describe('FilteredSearchBarRoot', () => {
checkboxChecked: true,
});
- expect(wrapperWithCheckbox.find(GlFormCheckbox).attributes('checked')).toBe('true');
+ expect(wrapperWithCheckbox.findComponent(GlFormCheckbox).attributes('checked')).toBe('true');
wrapperWithCheckbox.destroy();
});
@@ -448,7 +450,7 @@ describe('FilteredSearchBarRoot', () => {
await nextTick();
- expect(wrapperFullMount.find(GlDropdownItem).text()).toBe('Membership := Direct');
+ expect(wrapperFullMount.findComponent(GlDropdownItem).text()).toBe('Membership := Direct');
wrapperFullMount.destroy();
});
@@ -466,20 +468,20 @@ describe('FilteredSearchBarRoot', () => {
await nextTick();
- expect(wrapperFullMount.find(GlDropdownItem).text()).toBe('Membership := exclude');
+ expect(wrapperFullMount.findComponent(GlDropdownItem).text()).toBe('Membership := exclude');
wrapperFullMount.destroy();
});
});
it('renders sort dropdown component', () => {
- expect(wrapper.find(GlButtonGroup).exists()).toBe(true);
- expect(wrapper.find(GlDropdown).exists()).toBe(true);
- expect(wrapper.find(GlDropdown).props('text')).toBe(mockSortOptions[0].title);
+ expect(wrapper.findComponent(GlButtonGroup).exists()).toBe(true);
+ expect(wrapper.findComponent(GlDropdown).exists()).toBe(true);
+ expect(wrapper.findComponent(GlDropdown).props('text')).toBe(mockSortOptions[0].title);
});
it('renders sort dropdown items', () => {
- const dropdownItemsEl = wrapper.findAll(GlDropdownItem);
+ const dropdownItemsEl = wrapper.findAllComponents(GlDropdownItem);
expect(dropdownItemsEl).toHaveLength(mockSortOptions.length);
expect(dropdownItemsEl.at(0).text()).toBe(mockSortOptions[0].title);
@@ -488,7 +490,7 @@ describe('FilteredSearchBarRoot', () => {
});
it('renders sort direction button', () => {
- const sortButtonEl = wrapper.find(GlButton);
+ const sortButtonEl = wrapper.findComponent(GlButton);
expect(sortButtonEl.attributes('title')).toBe('Sort direction: Descending');
expect(sortButtonEl.props('icon')).toBe('sort-highest');
diff --git a/spec/frontend/vue_shared/components/filtered_search_bar/mock_data.js b/spec/frontend/vue_shared/components/filtered_search_bar/mock_data.js
index 86d1f21fd04..a6713b7e7e4 100644
--- a/spec/frontend/vue_shared/components/filtered_search_bar/mock_data.js
+++ b/spec/frontend/vue_shared/components/filtered_search_bar/mock_data.js
@@ -66,12 +66,14 @@ export const mockMilestones = [
export const mockCrmContacts = [
{
+ __typename: 'CustomerRelationsContact',
id: 'gid://gitlab/CustomerRelations::Contact/1',
firstName: 'John',
lastName: 'Smith',
email: 'john@smith.com',
},
{
+ __typename: 'CustomerRelationsContact',
id: 'gid://gitlab/CustomerRelations::Contact/2',
firstName: 'Andy',
lastName: 'Green',
@@ -81,10 +83,12 @@ export const mockCrmContacts = [
export const mockCrmOrganizations = [
{
+ __typename: 'CustomerRelationsOrganization',
id: 'gid://gitlab/CustomerRelations::Organization/1',
name: 'First Org Ltd.',
},
{
+ __typename: 'CustomerRelationsOrganization',
id: 'gid://gitlab/CustomerRelations::Organization/2',
name: 'Organizer S.p.a.',
},
@@ -102,11 +106,9 @@ export const mockProjectCrmContactsQueryResponse = {
__typename: 'CustomerRelationsContactConnection',
nodes: [
{
- __typename: 'CustomerRelationsContact',
...mockCrmContacts[0],
},
{
- __typename: 'CustomerRelationsContact',
...mockCrmContacts[1],
},
],
@@ -128,11 +130,9 @@ export const mockProjectCrmOrganizationsQueryResponse = {
__typename: 'CustomerRelationsOrganizationConnection',
nodes: [
{
- __typename: 'CustomerRelationsOrganization',
...mockCrmOrganizations[0],
},
{
- __typename: 'CustomerRelationsOrganization',
...mockCrmOrganizations[1],
},
],
diff --git a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/author_token_spec.js b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/author_token_spec.js
index 3f24d5df858..302dfabffb2 100644
--- a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/author_token_spec.js
+++ b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/author_token_spec.js
@@ -195,7 +195,7 @@ describe('AuthorToken', () => {
});
await nextTick();
- const tokenSegments = wrapper.findAll(GlFilteredSearchTokenSegment);
+ const tokenSegments = wrapper.findAllComponents(GlFilteredSearchTokenSegment);
expect(tokenSegments).toHaveLength(3); // Author, =, "Administrator"
@@ -207,7 +207,7 @@ describe('AuthorToken', () => {
it('renders token value with correct avatarUrl from author object', async () => {
const getAvatarEl = () =>
- wrapper.findAll(GlFilteredSearchTokenSegment).at(2).findComponent(GlAvatar);
+ wrapper.findAllComponents(GlFilteredSearchTokenSegment).at(2).findComponent(GlAvatar);
wrapper = createComponent({
value: { data: mockAuthors[0].username },
@@ -252,7 +252,7 @@ describe('AuthorToken', () => {
await activateSuggestionsList();
- const suggestions = wrapper.findAll(GlFilteredSearchSuggestion);
+ const suggestions = wrapper.findAllComponents(GlFilteredSearchSuggestion);
expect(suggestions).toHaveLength(defaultAuthors.length + currentUserLength);
defaultAuthors.forEach((label, index) => {
@@ -266,12 +266,12 @@ describe('AuthorToken', () => {
config: { ...mockAuthorToken, defaultAuthors: [] },
stubs: { Portal: true },
});
- const tokenSegments = wrapper.findAll(GlFilteredSearchTokenSegment);
+ const tokenSegments = wrapper.findAllComponents(GlFilteredSearchTokenSegment);
const suggestionsSegment = tokenSegments.at(2);
suggestionsSegment.vm.$emit('activate');
await nextTick();
- expect(wrapper.find(GlDropdownDivider).exists()).toBe(false);
+ expect(wrapper.findComponent(GlDropdownDivider).exists()).toBe(false);
});
it('renders `DEFAULT_NONE_ANY` as default suggestions', async () => {
@@ -283,7 +283,7 @@ describe('AuthorToken', () => {
await activateSuggestionsList();
- const suggestions = wrapper.findAll(GlFilteredSearchSuggestion);
+ const suggestions = wrapper.findAllComponents(GlFilteredSearchSuggestion);
expect(suggestions).toHaveLength(2 + currentUserLength);
expect(suggestions.at(0).text()).toBe(DEFAULT_NONE_ANY[0].text);
diff --git a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/branch_token_spec.js b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/branch_token_spec.js
index 7b495ec9bee..1de35daa3a5 100644
--- a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/branch_token_spec.js
+++ b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/branch_token_spec.js
@@ -114,7 +114,7 @@ describe('BranchToken', () => {
describe('template', () => {
const defaultBranches = DEFAULT_NONE_ANY;
async function showSuggestions() {
- const tokenSegments = wrapper.findAll(GlFilteredSearchTokenSegment);
+ const tokenSegments = wrapper.findAllComponents(GlFilteredSearchTokenSegment);
const suggestionsSegment = tokenSegments.at(2);
suggestionsSegment.vm.$emit('activate');
await nextTick();
@@ -133,11 +133,11 @@ describe('BranchToken', () => {
});
it('renders gl-filtered-search-token component', () => {
- expect(wrapper.find(GlFilteredSearchToken).exists()).toBe(true);
+ expect(wrapper.findComponent(GlFilteredSearchToken).exists()).toBe(true);
});
it('renders token item when value is selected', () => {
- const tokenSegments = wrapper.findAll(GlFilteredSearchTokenSegment);
+ const tokenSegments = wrapper.findAllComponents(GlFilteredSearchTokenSegment);
expect(tokenSegments).toHaveLength(3);
expect(tokenSegments.at(2).text()).toBe(mockBranches[0].name);
@@ -150,7 +150,7 @@ describe('BranchToken', () => {
stubs: { Portal: true },
});
await showSuggestions();
- const suggestions = wrapper.findAll(GlFilteredSearchSuggestion);
+ const suggestions = wrapper.findAllComponents(GlFilteredSearchSuggestion);
expect(suggestions).toHaveLength(defaultBranches.length);
defaultBranches.forEach((branch, index) => {
@@ -166,8 +166,8 @@ describe('BranchToken', () => {
});
await showSuggestions();
- expect(wrapper.find(GlFilteredSearchSuggestion).exists()).toBe(false);
- expect(wrapper.find(GlDropdownDivider).exists()).toBe(false);
+ expect(wrapper.findComponent(GlFilteredSearchSuggestion).exists()).toBe(false);
+ expect(wrapper.findComponent(GlDropdownDivider).exists()).toBe(false);
});
it('renders no suggestions as default', async () => {
@@ -177,7 +177,7 @@ describe('BranchToken', () => {
stubs: { Portal: true },
});
await showSuggestions();
- const suggestions = wrapper.findAll(GlFilteredSearchSuggestion);
+ const suggestions = wrapper.findAllComponents(GlFilteredSearchSuggestion);
expect(suggestions).toHaveLength(0);
});
diff --git a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/crm_contact_token_spec.js b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/crm_contact_token_spec.js
index 157e021fc60..c9879987931 100644
--- a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/crm_contact_token_spec.js
+++ b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/crm_contact_token_spec.js
@@ -195,7 +195,7 @@ describe('CrmContactToken', () => {
value: { data: '1' },
});
- const baseTokenEl = wrapper.find(BaseToken);
+ const baseTokenEl = wrapper.findComponent(BaseToken);
expect(baseTokenEl.exists()).toBe(true);
expect(baseTokenEl.props()).toMatchObject({
@@ -210,7 +210,7 @@ describe('CrmContactToken', () => {
value: { data: `${getIdFromGraphQLId(contact.id)}` },
});
- const tokenSegments = wrapper.findAll(GlFilteredSearchTokenSegment);
+ const tokenSegments = wrapper.findAllComponents(GlFilteredSearchTokenSegment);
expect(tokenSegments).toHaveLength(3); // Contact, =, Contact name
expect(tokenSegments.at(2).text()).toBe(`${contact.firstName} ${contact.lastName}`); // Contact name
@@ -222,12 +222,12 @@ describe('CrmContactToken', () => {
config: { ...mockCrmContactToken, defaultContacts },
stubs: { Portal: true },
});
- const tokenSegments = wrapper.findAll(GlFilteredSearchTokenSegment);
+ const tokenSegments = wrapper.findAllComponents(GlFilteredSearchTokenSegment);
const suggestionsSegment = tokenSegments.at(2);
suggestionsSegment.vm.$emit('activate');
await nextTick();
- const suggestions = wrapper.findAll(GlFilteredSearchSuggestion);
+ const suggestions = wrapper.findAllComponents(GlFilteredSearchSuggestion);
expect(suggestions).toHaveLength(defaultContacts.length);
defaultContacts.forEach((contact, index) => {
@@ -241,13 +241,13 @@ describe('CrmContactToken', () => {
config: { ...mockCrmContactToken, defaultContacts: [] },
stubs: { Portal: true },
});
- const tokenSegments = wrapper.findAll(GlFilteredSearchTokenSegment);
+ const tokenSegments = wrapper.findAllComponents(GlFilteredSearchTokenSegment);
const suggestionsSegment = tokenSegments.at(2);
suggestionsSegment.vm.$emit('activate');
await nextTick();
- expect(wrapper.find(GlFilteredSearchSuggestion).exists()).toBe(false);
- expect(wrapper.find(GlDropdownDivider).exists()).toBe(false);
+ expect(wrapper.findComponent(GlFilteredSearchSuggestion).exists()).toBe(false);
+ expect(wrapper.findComponent(GlDropdownDivider).exists()).toBe(false);
});
it('renders `DEFAULT_NONE_ANY` as default suggestions', () => {
@@ -256,11 +256,11 @@ describe('CrmContactToken', () => {
config: { ...mockCrmContactToken },
stubs: { Portal: true },
});
- const tokenSegments = wrapper.findAll(GlFilteredSearchTokenSegment);
+ const tokenSegments = wrapper.findAllComponents(GlFilteredSearchTokenSegment);
const suggestionsSegment = tokenSegments.at(2);
suggestionsSegment.vm.$emit('activate');
- const suggestions = wrapper.findAll(GlFilteredSearchSuggestion);
+ const suggestions = wrapper.findAllComponents(GlFilteredSearchSuggestion);
expect(suggestions).toHaveLength(DEFAULT_NONE_ANY.length);
DEFAULT_NONE_ANY.forEach((contact, index) => {
diff --git a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/crm_organization_token_spec.js b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/crm_organization_token_spec.js
index 977f8bbef61..16333b052e6 100644
--- a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/crm_organization_token_spec.js
+++ b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/crm_organization_token_spec.js
@@ -194,7 +194,7 @@ describe('CrmOrganizationToken', () => {
value: { data: '1' },
});
- const baseTokenEl = wrapper.find(BaseToken);
+ const baseTokenEl = wrapper.findComponent(BaseToken);
expect(baseTokenEl.exists()).toBe(true);
expect(baseTokenEl.props()).toMatchObject({
@@ -209,7 +209,7 @@ describe('CrmOrganizationToken', () => {
value: { data: `${getIdFromGraphQLId(organization.id)}` },
});
- const tokenSegments = wrapper.findAll(GlFilteredSearchTokenSegment);
+ const tokenSegments = wrapper.findAllComponents(GlFilteredSearchTokenSegment);
expect(tokenSegments).toHaveLength(3); // Organization, =, Organization name
expect(tokenSegments.at(2).text()).toBe(organization.name); // Organization name
@@ -221,12 +221,12 @@ describe('CrmOrganizationToken', () => {
config: { ...mockCrmOrganizationToken, defaultOrganizations },
stubs: { Portal: true },
});
- const tokenSegments = wrapper.findAll(GlFilteredSearchTokenSegment);
+ const tokenSegments = wrapper.findAllComponents(GlFilteredSearchTokenSegment);
const suggestionsSegment = tokenSegments.at(2);
suggestionsSegment.vm.$emit('activate');
await nextTick();
- const suggestions = wrapper.findAll(GlFilteredSearchSuggestion);
+ const suggestions = wrapper.findAllComponents(GlFilteredSearchSuggestion);
expect(suggestions).toHaveLength(defaultOrganizations.length);
defaultOrganizations.forEach((organization, index) => {
@@ -240,13 +240,13 @@ describe('CrmOrganizationToken', () => {
config: { ...mockCrmOrganizationToken, defaultOrganizations: [] },
stubs: { Portal: true },
});
- const tokenSegments = wrapper.findAll(GlFilteredSearchTokenSegment);
+ const tokenSegments = wrapper.findAllComponents(GlFilteredSearchTokenSegment);
const suggestionsSegment = tokenSegments.at(2);
suggestionsSegment.vm.$emit('activate');
await nextTick();
- expect(wrapper.find(GlFilteredSearchSuggestion).exists()).toBe(false);
- expect(wrapper.find(GlDropdownDivider).exists()).toBe(false);
+ expect(wrapper.findComponent(GlFilteredSearchSuggestion).exists()).toBe(false);
+ expect(wrapper.findComponent(GlDropdownDivider).exists()).toBe(false);
});
it('renders `DEFAULT_NONE_ANY` as default suggestions', () => {
@@ -255,11 +255,11 @@ describe('CrmOrganizationToken', () => {
config: { ...mockCrmOrganizationToken },
stubs: { Portal: true },
});
- const tokenSegments = wrapper.findAll(GlFilteredSearchTokenSegment);
+ const tokenSegments = wrapper.findAllComponents(GlFilteredSearchTokenSegment);
const suggestionsSegment = tokenSegments.at(2);
suggestionsSegment.vm.$emit('activate');
- const suggestions = wrapper.findAll(GlFilteredSearchSuggestion);
+ const suggestions = wrapper.findAllComponents(GlFilteredSearchSuggestion);
expect(suggestions).toHaveLength(DEFAULT_NONE_ANY.length);
DEFAULT_NONE_ANY.forEach((organization, index) => {
diff --git a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/emoji_token_spec.js b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/emoji_token_spec.js
index dcb0d095b1b..bf4a6eb7635 100644
--- a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/emoji_token_spec.js
+++ b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/emoji_token_spec.js
@@ -135,14 +135,16 @@ describe('EmojiToken', () => {
});
it('renders gl-filtered-search-token component', () => {
- expect(wrapper.find(GlFilteredSearchToken).exists()).toBe(true);
+ expect(wrapper.findComponent(GlFilteredSearchToken).exists()).toBe(true);
});
it('renders token item when value is selected', () => {
- const tokenSegments = wrapper.findAll(GlFilteredSearchTokenSegment);
+ const tokenSegments = wrapper.findAllComponents(GlFilteredSearchTokenSegment);
expect(tokenSegments).toHaveLength(3); // My Reaction, =, "thumbsup"
- expect(tokenSegments.at(2).find(GlEmoji).attributes('data-name')).toEqual('thumbsup');
+ expect(tokenSegments.at(2).findComponent(GlEmoji).attributes('data-name')).toEqual(
+ 'thumbsup',
+ );
});
it('renders provided defaultEmojis as suggestions', async () => {
@@ -151,12 +153,12 @@ describe('EmojiToken', () => {
config: { ...mockReactionEmojiToken, defaultEmojis },
stubs: { Portal: true, GlEmoji },
});
- const tokenSegments = wrapper.findAll(GlFilteredSearchTokenSegment);
+ const tokenSegments = wrapper.findAllComponents(GlFilteredSearchTokenSegment);
const suggestionsSegment = tokenSegments.at(2);
suggestionsSegment.vm.$emit('activate');
await nextTick();
- const suggestions = wrapper.findAll(GlFilteredSearchSuggestion);
+ const suggestions = wrapper.findAllComponents(GlFilteredSearchSuggestion);
expect(suggestions).toHaveLength(defaultEmojis.length);
defaultEmojis.forEach((emoji, index) => {
@@ -170,13 +172,13 @@ describe('EmojiToken', () => {
config: { ...mockReactionEmojiToken, defaultEmojis: [] },
stubs: { Portal: true, GlEmoji },
});
- const tokenSegments = wrapper.findAll(GlFilteredSearchTokenSegment);
+ const tokenSegments = wrapper.findAllComponents(GlFilteredSearchTokenSegment);
const suggestionsSegment = tokenSegments.at(2);
suggestionsSegment.vm.$emit('activate');
await nextTick();
- expect(wrapper.find(GlFilteredSearchSuggestion).exists()).toBe(false);
- expect(wrapper.find(GlDropdownDivider).exists()).toBe(false);
+ expect(wrapper.findComponent(GlFilteredSearchSuggestion).exists()).toBe(false);
+ expect(wrapper.findComponent(GlDropdownDivider).exists()).toBe(false);
});
it('renders `DEFAULT_LABEL_NONE` and `DEFAULT_LABEL_ANY` as default suggestions', async () => {
@@ -185,12 +187,12 @@ describe('EmojiToken', () => {
config: { ...mockReactionEmojiToken },
stubs: { Portal: true, GlEmoji },
});
- const tokenSegments = wrapper.findAll(GlFilteredSearchTokenSegment);
+ const tokenSegments = wrapper.findAllComponents(GlFilteredSearchTokenSegment);
const suggestionsSegment = tokenSegments.at(2);
suggestionsSegment.vm.$emit('activate');
await nextTick();
- const suggestions = wrapper.findAll(GlFilteredSearchSuggestion);
+ const suggestions = wrapper.findAllComponents(GlFilteredSearchSuggestion);
expect(suggestions).toHaveLength(2);
expect(suggestions.at(0).text()).toBe(DEFAULT_LABEL_NONE.text);
diff --git a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/label_token_spec.js b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/label_token_spec.js
index 51161a1a0ef..01e281884ed 100644
--- a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/label_token_spec.js
+++ b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/label_token_spec.js
@@ -156,7 +156,7 @@ describe('LabelToken', () => {
});
it('renders base-token component', () => {
- const baseTokenEl = wrapper.find(BaseToken);
+ const baseTokenEl = wrapper.findComponent(BaseToken);
expect(baseTokenEl.exists()).toBe(true);
expect(baseTokenEl.props()).toMatchObject({
@@ -166,7 +166,7 @@ describe('LabelToken', () => {
});
it('renders token item when value is selected', () => {
- const tokenSegments = wrapper.findAll(GlFilteredSearchTokenSegment);
+ const tokenSegments = wrapper.findAllComponents(GlFilteredSearchTokenSegment);
expect(tokenSegments).toHaveLength(3); // Label, =, "Foo Label"
expect(tokenSegments.at(2).text()).toBe(`~${mockRegularLabel.title}`); // "Foo Label"
@@ -181,12 +181,12 @@ describe('LabelToken', () => {
config: { ...mockLabelToken, defaultLabels },
stubs: { Portal: true },
});
- const tokenSegments = wrapper.findAll(GlFilteredSearchTokenSegment);
+ const tokenSegments = wrapper.findAllComponents(GlFilteredSearchTokenSegment);
const suggestionsSegment = tokenSegments.at(2);
suggestionsSegment.vm.$emit('activate');
await nextTick();
- const suggestions = wrapper.findAll(GlFilteredSearchSuggestion);
+ const suggestions = wrapper.findAllComponents(GlFilteredSearchSuggestion);
expect(suggestions).toHaveLength(defaultLabels.length);
defaultLabels.forEach((label, index) => {
@@ -200,13 +200,13 @@ describe('LabelToken', () => {
config: { ...mockLabelToken, defaultLabels: [] },
stubs: { Portal: true },
});
- const tokenSegments = wrapper.findAll(GlFilteredSearchTokenSegment);
+ const tokenSegments = wrapper.findAllComponents(GlFilteredSearchTokenSegment);
const suggestionsSegment = tokenSegments.at(2);
suggestionsSegment.vm.$emit('activate');
await nextTick();
- expect(wrapper.find(GlFilteredSearchSuggestion).exists()).toBe(false);
- expect(wrapper.find(GlDropdownDivider).exists()).toBe(false);
+ expect(wrapper.findComponent(GlFilteredSearchSuggestion).exists()).toBe(false);
+ expect(wrapper.findComponent(GlDropdownDivider).exists()).toBe(false);
});
it('renders `DEFAULT_NONE_ANY` as default suggestions', () => {
@@ -215,11 +215,11 @@ describe('LabelToken', () => {
config: { ...mockLabelToken },
stubs: { Portal: true },
});
- const tokenSegments = wrapper.findAll(GlFilteredSearchTokenSegment);
+ const tokenSegments = wrapper.findAllComponents(GlFilteredSearchTokenSegment);
const suggestionsSegment = tokenSegments.at(2);
suggestionsSegment.vm.$emit('activate');
- const suggestions = wrapper.findAll(GlFilteredSearchSuggestion);
+ const suggestions = wrapper.findAllComponents(GlFilteredSearchSuggestion);
expect(suggestions).toHaveLength(DEFAULT_NONE_ANY.length);
DEFAULT_NONE_ANY.forEach((label, index) => {
diff --git a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/milestone_token_spec.js b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/milestone_token_spec.js
index 7c545f76c0b..f71ba51fc5b 100644
--- a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/milestone_token_spec.js
+++ b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/milestone_token_spec.js
@@ -155,11 +155,11 @@ describe('MilestoneToken', () => {
});
it('renders gl-filtered-search-token component', () => {
- expect(wrapper.find(GlFilteredSearchToken).exists()).toBe(true);
+ expect(wrapper.findComponent(GlFilteredSearchToken).exists()).toBe(true);
});
it('renders token item when value is selected', () => {
- const tokenSegments = wrapper.findAll(GlFilteredSearchTokenSegment);
+ const tokenSegments = wrapper.findAllComponents(GlFilteredSearchTokenSegment);
expect(tokenSegments).toHaveLength(3); // Milestone, =, '%"4.0"'
expect(tokenSegments.at(2).text()).toBe(`%${mockRegularMilestone.title}`); // "4.0 RC1"
@@ -171,12 +171,12 @@ describe('MilestoneToken', () => {
config: { ...mockMilestoneToken, defaultMilestones },
stubs: { Portal: true },
});
- const tokenSegments = wrapper.findAll(GlFilteredSearchTokenSegment);
+ const tokenSegments = wrapper.findAllComponents(GlFilteredSearchTokenSegment);
const suggestionsSegment = tokenSegments.at(2);
suggestionsSegment.vm.$emit('activate');
await nextTick();
- const suggestions = wrapper.findAll(GlFilteredSearchSuggestion);
+ const suggestions = wrapper.findAllComponents(GlFilteredSearchSuggestion);
expect(suggestions).toHaveLength(defaultMilestones.length);
defaultMilestones.forEach((milestone, index) => {
@@ -190,13 +190,13 @@ describe('MilestoneToken', () => {
config: { ...mockMilestoneToken, defaultMilestones: [] },
stubs: { Portal: true },
});
- const tokenSegments = wrapper.findAll(GlFilteredSearchTokenSegment);
+ const tokenSegments = wrapper.findAllComponents(GlFilteredSearchTokenSegment);
const suggestionsSegment = tokenSegments.at(2);
suggestionsSegment.vm.$emit('activate');
await nextTick();
- expect(wrapper.find(GlFilteredSearchSuggestion).exists()).toBe(false);
- expect(wrapper.find(GlDropdownDivider).exists()).toBe(false);
+ expect(wrapper.findComponent(GlFilteredSearchSuggestion).exists()).toBe(false);
+ expect(wrapper.findComponent(GlDropdownDivider).exists()).toBe(false);
});
it('renders `DEFAULT_MILESTONES` as default suggestions', async () => {
@@ -205,12 +205,12 @@ describe('MilestoneToken', () => {
config: { ...mockMilestoneToken },
stubs: { Portal: true },
});
- const tokenSegments = wrapper.findAll(GlFilteredSearchTokenSegment);
+ const tokenSegments = wrapper.findAllComponents(GlFilteredSearchTokenSegment);
const suggestionsSegment = tokenSegments.at(2);
suggestionsSegment.vm.$emit('activate');
await nextTick();
- const suggestions = wrapper.findAll(GlFilteredSearchSuggestion);
+ const suggestions = wrapper.findAllComponents(GlFilteredSearchSuggestion);
expect(suggestions).toHaveLength(DEFAULT_MILESTONES.length);
DEFAULT_MILESTONES.forEach((milestone, index) => {
diff --git a/spec/frontend/vue_shared/components/gitlab_version_check_spec.js b/spec/frontend/vue_shared/components/gitlab_version_check_spec.js
index b180e8c12dd..6699ae5fb69 100644
--- a/spec/frontend/vue_shared/components/gitlab_version_check_spec.js
+++ b/spec/frontend/vue_shared/components/gitlab_version_check_spec.js
@@ -26,13 +26,44 @@ describe('GitlabVersionCheck', () => {
wrapper = shallowMount(GitlabVersionCheck);
};
+ const dummyGon = {
+ relative_url_root: '/',
+ };
+
+ let originalGon;
+
afterEach(() => {
wrapper.destroy();
mock.restore();
+ window.gon = originalGon;
});
const findGlBadge = () => wrapper.findComponent(GlBadge);
+ describe.each`
+ root | description
+ ${'/'} | ${'not used (uses its own (sub)domain)'}
+ ${'/gitlab'} | ${'custom path'}
+ ${'/service/gitlab'} | ${'custom path with 2 depth'}
+ `('path for version_check.json', ({ root, description }) => {
+ describe(`when relative url is ${description}: ${root}`, () => {
+ beforeEach(async () => {
+ originalGon = window.gon;
+ window.gon = { ...dummyGon };
+ window.gon.relative_url_root = root;
+ createComponent(defaultResponse);
+ await waitForPromises(); // Ensure we wrap up the axios call
+ });
+
+ it('reflects the relative url setting', () => {
+ expect(mock.history.get.length).toBe(1);
+
+ const pathRegex = new RegExp(`^${root}`);
+ expect(mock.history.get[0].url).toMatch(pathRegex);
+ });
+ });
+ });
+
describe('template', () => {
describe.each`
description | mockResponse | renders
diff --git a/spec/frontend/vue_shared/components/gl_modal_vuex_spec.js b/spec/frontend/vue_shared/components/gl_modal_vuex_spec.js
index c0a6588833e..2dcd91f737f 100644
--- a/spec/frontend/vue_shared/components/gl_modal_vuex_spec.js
+++ b/spec/frontend/vue_shared/components/gl_modal_vuex_spec.js
@@ -59,7 +59,7 @@ describe('GlModalVuex', () => {
default: `<div>${TEST_SLOT}</div>`,
},
});
- const glModal = wrapper.find(GlModal);
+ const glModal = wrapper.findComponent(GlModal);
expect(glModal.props('modalId')).toBe(TEST_MODAL_ID);
expect(glModal.text()).toContain(TEST_SLOT);
@@ -76,7 +76,7 @@ describe('GlModalVuex', () => {
okVariant,
},
});
- const glModal = wrapper.find(GlModal);
+ const glModal = wrapper.findComponent(GlModal);
expect(glModal.attributes('title')).toEqual(title);
expect(glModal.attributes('oktitle')).toEqual(title);
@@ -90,7 +90,7 @@ describe('GlModalVuex', () => {
listeners: { ok },
});
- const glModal = wrapper.find(GlModal);
+ const glModal = wrapper.findComponent(GlModal);
glModal.vm.$emit('ok');
expect(ok).toHaveBeenCalledTimes(1);
@@ -101,7 +101,7 @@ describe('GlModalVuex', () => {
factory();
- const glModal = wrapper.find(GlModal);
+ const glModal = wrapper.findComponent(GlModal);
glModal.vm.$emit('shown');
expect(actions.show).toHaveBeenCalledTimes(1);
@@ -112,7 +112,7 @@ describe('GlModalVuex', () => {
factory();
- const glModal = wrapper.find(GlModal);
+ const glModal = wrapper.findComponent(GlModal);
glModal.vm.$emit('hidden');
expect(actions.hide).toHaveBeenCalledTimes(1);
diff --git a/spec/frontend/vue_shared/components/help_popover_spec.js b/spec/frontend/vue_shared/components/help_popover_spec.js
index 64dce194327..6fd5ae0e946 100644
--- a/spec/frontend/vue_shared/components/help_popover_spec.js
+++ b/spec/frontend/vue_shared/components/help_popover_spec.js
@@ -7,8 +7,8 @@ describe('HelpPopover', () => {
const title = 'popover <strong>title</strong>';
const content = 'popover <b>content</b>';
- const findQuestionButton = () => wrapper.find(GlButton);
- const findPopover = () => wrapper.find(GlPopover);
+ const findQuestionButton = () => wrapper.findComponent(GlButton);
+ const findPopover = () => wrapper.findComponent(GlPopover);
const createComponent = ({ props, ...opts } = {}) => {
wrapper = mount(HelpPopover, {
diff --git a/spec/frontend/vue_shared/components/integration_help_text_spec.js b/spec/frontend/vue_shared/components/integration_help_text_spec.js
index c0e8b719007..c63e46313b3 100644
--- a/spec/frontend/vue_shared/components/integration_help_text_spec.js
+++ b/spec/frontend/vue_shared/components/integration_help_text_spec.js
@@ -30,9 +30,9 @@ describe('IntegrationHelpText component', () => {
it('should use the gl components', () => {
wrapper = createComponent();
- expect(wrapper.find(GlSprintf).exists()).toBe(true);
- expect(wrapper.find(GlIcon).exists()).toBe(true);
- expect(wrapper.find(GlLink).exists()).toBe(true);
+ expect(wrapper.findComponent(GlSprintf).exists()).toBe(true);
+ expect(wrapper.findComponent(GlIcon).exists()).toBe(true);
+ expect(wrapper.findComponent(GlLink).exists()).toBe(true);
});
it('should render the help text', () => {
@@ -44,9 +44,9 @@ describe('IntegrationHelpText component', () => {
it('should not use the gl-link and gl-icon components', () => {
wrapper = createComponent({ message: 'Click nowhere!' });
- expect(wrapper.find(GlSprintf).exists()).toBe(true);
- expect(wrapper.find(GlIcon).exists()).toBe(false);
- expect(wrapper.find(GlLink).exists()).toBe(false);
+ expect(wrapper.findComponent(GlSprintf).exists()).toBe(true);
+ expect(wrapper.findComponent(GlIcon).exists()).toBe(false);
+ expect(wrapper.findComponent(GlLink).exists()).toBe(false);
});
it('should not render the link when start and end is not provided', () => {
diff --git a/spec/frontend/vue_shared/components/markdown/field_spec.js b/spec/frontend/vue_shared/components/markdown/field_spec.js
index 85a135d2b89..50864a4bf25 100644
--- a/spec/frontend/vue_shared/components/markdown/field_spec.js
+++ b/spec/frontend/vue_shared/components/markdown/field_spec.js
@@ -76,7 +76,7 @@ describe('Markdown field component', () => {
const getMarkdownButton = () => subject.find('.js-md');
const getListBulletedButton = () => subject.findAll('.js-md[title="Add a bullet list"]');
const getVideo = () => subject.find('video');
- const getAttachButton = () => subject.find('.button-attach-file');
+ const getAttachButton = () => subject.findByTestId('button-attach-file');
const clickAttachButton = () => getAttachButton().trigger('click');
const findDropzone = () => subject.find('.div-dropzone');
const findMarkdownHeader = () => subject.findComponent(MarkdownFieldHeader);
@@ -232,13 +232,10 @@ describe('Markdown field component', () => {
});
});
- it('should render attach a file button', () => {
- expect(getAttachButton().text()).toBe('Attach a file');
- });
-
it('should trigger dropzone when attach button is clicked', () => {
expect(dropzoneSpy).not.toHaveBeenCalled();
+ getAttachButton().trigger('click');
clickAttachButton();
expect(dropzoneSpy).toHaveBeenCalled();
diff --git a/spec/frontend/vue_shared/components/markdown/header_spec.js b/spec/frontend/vue_shared/components/markdown/header_spec.js
index 67222cab247..9831908f806 100644
--- a/spec/frontend/vue_shared/components/markdown/header_spec.js
+++ b/spec/frontend/vue_shared/components/markdown/header_spec.js
@@ -21,7 +21,7 @@ describe('Markdown field header component', () => {
const findWriteTab = () => wrapper.findByTestId('write-tab');
const findPreviewTab = () => wrapper.findByTestId('preview-tab');
const findToolbar = () => wrapper.findByTestId('md-header-toolbar');
- const findToolbarButtons = () => wrapper.findAll(ToolbarButton);
+ const findToolbarButtons = () => wrapper.findAllComponents(ToolbarButton);
const findToolbarButtonByProp = (prop, value) =>
findToolbarButtons()
.filter((button) => button.props(prop) === value)
@@ -44,16 +44,16 @@ describe('Markdown field header component', () => {
describe('markdown header buttons', () => {
it('renders the buttons with the correct title', () => {
const buttons = [
+ 'Insert suggestion',
'Add bold text (⌘B)',
'Add italic text (⌘I)',
'Add strikethrough text (⌘⇧X)',
'Insert a quote',
- 'Insert suggestion',
'Insert code',
'Add a link (⌘K)',
'Add a bullet list',
'Add a numbered list',
- 'Add a task list',
+ 'Add a checklist',
'Add a collapsible section',
'Add a table',
'Go full screen',
@@ -65,6 +65,13 @@ describe('Markdown field header component', () => {
});
});
+ it('renders "Attach a file or image" button using gl-button', () => {
+ const button = wrapper.findByTestId('button-attach-file');
+
+ expect(button.element.tagName).toBe('GL-BUTTON-STUB');
+ expect(button.attributes('title')).toBe('Attach a file or image');
+ });
+
describe('when the user is on a non-Mac', () => {
beforeEach(() => {
delete window.gl.client.isMac;
@@ -118,8 +125,8 @@ describe('Markdown field header component', () => {
),
]);
- expect(wrapper.emitted('preview-markdown')).toBeFalsy();
- expect(wrapper.emitted('write-markdown')).toBeFalsy();
+ expect(wrapper.emitted('preview-markdown')).toBeUndefined();
+ expect(wrapper.emitted('write-markdown')).toBeUndefined();
});
it('blurs preview link after click', () => {
diff --git a/spec/frontend/vue_shared/components/markdown/suggestion_diff_header_spec.js b/spec/frontend/vue_shared/components/markdown/suggestion_diff_header_spec.js
index 9944267cf24..9db1b779a04 100644
--- a/spec/frontend/vue_shared/components/markdown/suggestion_diff_header_spec.js
+++ b/spec/frontend/vue_shared/components/markdown/suggestion_diff_header_spec.js
@@ -38,13 +38,13 @@ describe('Suggestion Diff component', () => {
wrapper.destroy();
});
- const findApplyButton = () => wrapper.find(ApplySuggestion);
+ const findApplyButton = () => wrapper.findComponent(ApplySuggestion);
const findApplyBatchButton = () => wrapper.find('.js-apply-batch-btn');
const findAddToBatchButton = () => wrapper.find('.js-add-to-batch-btn');
const findRemoveFromBatchButton = () => wrapper.find('.js-remove-from-batch-btn');
const findHeader = () => wrapper.find('.js-suggestion-diff-header');
const findHelpButton = () => wrapper.find('.js-help-btn');
- const findLoading = () => wrapper.find(GlLoadingIcon);
+ const findLoading = () => wrapper.findComponent(GlLoadingIcon);
it('renders a suggestion header', () => {
createComponent();
diff --git a/spec/frontend/vue_shared/components/markdown/suggestion_diff_spec.js b/spec/frontend/vue_shared/components/markdown/suggestion_diff_spec.js
index af27e953776..d84483c1663 100644
--- a/spec/frontend/vue_shared/components/markdown/suggestion_diff_spec.js
+++ b/spec/frontend/vue_shared/components/markdown/suggestion_diff_spec.js
@@ -71,7 +71,7 @@ describe('Suggestion Diff component', () => {
});
it('renders a correct amount of suggestion diff rows', () => {
- expect(wrapper.findAll(SuggestionDiffRow)).toHaveLength(3);
+ expect(wrapper.findAllComponents(SuggestionDiffRow)).toHaveLength(3);
});
it.each`
@@ -81,14 +81,14 @@ describe('Suggestion Diff component', () => {
${'addToBatch'} | ${[]} | ${[suggestionId]}
${'removeFromBatch'} | ${[]} | ${[suggestionId]}
`('emits $event event on suggestion diff header $event', ({ event, childArgs, args }) => {
- wrapper.find(SuggestionDiffHeader).vm.$emit(event, ...childArgs);
+ wrapper.findComponent(SuggestionDiffHeader).vm.$emit(event, ...childArgs);
expect(wrapper.emitted(event)).toBeDefined();
expect(wrapper.emitted(event)).toEqual([args]);
});
it('passes suggestion batch props to suggestion diff header', () => {
- expect(wrapper.find(SuggestionDiffHeader).props()).toMatchObject({
+ expect(wrapper.findComponent(SuggestionDiffHeader).props()).toMatchObject({
batchSuggestionsCount: 1,
isBatched: true,
isApplyingBatch: MOCK_DATA.suggestion.is_applying_batch,
diff --git a/spec/frontend/vue_shared/components/markdown/toolbar_button_spec.js b/spec/frontend/vue_shared/components/markdown/toolbar_button_spec.js
index 19e4f2d8c92..82210e79799 100644
--- a/spec/frontend/vue_shared/components/markdown/toolbar_button_spec.js
+++ b/spec/frontend/vue_shared/components/markdown/toolbar_button_spec.js
@@ -26,7 +26,7 @@ describe('toolbar_button', () => {
});
const getButtonShortcutsAttr = () => {
- return wrapper.find(GlButton).attributes('data-md-shortcuts');
+ return wrapper.findComponent(GlButton).attributes('data-md-shortcuts');
};
describe('keyboard shortcuts', () => {
diff --git a/spec/frontend/vue_shared/components/memory_graph_spec.js b/spec/frontend/vue_shared/components/memory_graph_spec.js
index 53b96bd1b98..ae8d5ff78ba 100644
--- a/spec/frontend/vue_shared/components/memory_graph_spec.js
+++ b/spec/frontend/vue_shared/components/memory_graph_spec.js
@@ -47,7 +47,7 @@ describe('MemoryGraph', () => {
it('should draw container with chart', () => {
expect(wrapper.element).toMatchSnapshot();
expect(wrapper.find('.memory-graph-container').exists()).toBe(true);
- expect(wrapper.find(GlSparklineChart).exists()).toBe(true);
+ expect(wrapper.findComponent(GlSparklineChart).exists()).toBe(true);
});
});
});
diff --git a/spec/frontend/vue_shared/components/metric_images/metric_images_tab_spec.js b/spec/frontend/vue_shared/components/metric_images/metric_images_tab_spec.js
index 2cefa77b72d..1789610dba9 100644
--- a/spec/frontend/vue_shared/components/metric_images/metric_images_tab_spec.js
+++ b/spec/frontend/vue_shared/components/metric_images/metric_images_tab_spec.js
@@ -114,7 +114,7 @@ describe('Metric images tab', () => {
await waitForPromises();
- expect(findModal().attributes('visible')).toBeFalsy();
+ expect(findModal().attributes('visible')).toBeUndefined();
});
it('should add files and url when selected', async () => {
diff --git a/spec/frontend/vue_shared/components/namespace_select/namespace_select_spec.js b/spec/frontend/vue_shared/components/namespace_select/namespace_select_spec.js
index c11b20a692e..2c14d65186b 100644
--- a/spec/frontend/vue_shared/components/namespace_select/namespace_select_spec.js
+++ b/spec/frontend/vue_shared/components/namespace_select/namespace_select_spec.js
@@ -1,5 +1,12 @@
import { nextTick } from 'vue';
-import { GlDropdown, GlDropdownItem, GlDropdownSectionHeader, GlSearchBoxByType } from '@gitlab/ui';
+import {
+ GlDropdown,
+ GlDropdownItem,
+ GlDropdownSectionHeader,
+ GlSearchBoxByType,
+ GlIntersectionObserver,
+ GlLoadingIcon,
+} from '@gitlab/ui';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import NamespaceSelect, {
i18n,
@@ -7,7 +14,7 @@ import NamespaceSelect, {
} from '~/vue_shared/components/namespace_select/namespace_select.vue';
import { userNamespaces, groupNamespaces } from './mock_data';
-const FLAT_NAMESPACES = [...groupNamespaces, ...userNamespaces];
+const FLAT_NAMESPACES = [...userNamespaces, ...groupNamespaces];
const EMPTY_NAMESPACE_TITLE = 'Empty namespace TEST';
const EMPTY_NAMESPACE_ITEM = { id: EMPTY_NAMESPACE_ID, humanName: EMPTY_NAMESPACE_TITLE };
@@ -31,6 +38,8 @@ describe('Namespace Select', () => {
const findDropdown = () => wrapper.findComponent(GlDropdown);
const findDropdownText = () => findDropdown().props('text');
const findDropdownItems = () => wrapper.findAllComponents(GlDropdownItem);
+ const findGroupDropdownItems = () =>
+ wrapper.findByTestId('namespace-list-groups').findAllComponents(GlDropdownItem);
const findDropdownItemsTexts = () => findDropdownItems().wrappers.map((x) => x.text());
const findSectionHeaders = () => wrapper.findAllComponents(GlDropdownSectionHeader);
const findSearchBox = () => wrapper.findComponent(GlSearchBoxByType);
@@ -59,7 +68,7 @@ describe('Namespace Select', () => {
it('splits group and user namespaces', () => {
const headers = findSectionHeaders();
- expect(wrappersText(headers)).toEqual([i18n.GROUPS, i18n.USERS]);
+ expect(wrappersText(headers)).toEqual([i18n.USERS, i18n.GROUPS]);
});
it('does not render wrapper as full width', () => {
@@ -89,18 +98,20 @@ describe('Namespace Select', () => {
describe('with search', () => {
it.each`
- term | includeEmptyNamespace | expectedItems
- ${''} | ${false} | ${[...groupNamespaces, ...userNamespaces]}
- ${'sub'} | ${false} | ${[groupNamespaces[1]]}
- ${'User'} | ${false} | ${[...userNamespaces]}
- ${'User'} | ${true} | ${[...userNamespaces]}
- ${'namespace'} | ${true} | ${[EMPTY_NAMESPACE_ITEM, ...userNamespaces]}
+ term | includeEmptyNamespace | shouldFilterNamespaces | expectedItems
+ ${''} | ${false} | ${true} | ${[...userNamespaces, ...groupNamespaces]}
+ ${'sub'} | ${false} | ${true} | ${[groupNamespaces[1]]}
+ ${'User'} | ${false} | ${true} | ${[...userNamespaces]}
+ ${'User'} | ${true} | ${true} | ${[...userNamespaces]}
+ ${'namespace'} | ${true} | ${true} | ${[EMPTY_NAMESPACE_ITEM, ...userNamespaces]}
+ ${'sub'} | ${false} | ${false} | ${[...userNamespaces, ...groupNamespaces]}
`(
- 'with term=$term and includeEmptyNamespace=$includeEmptyNamespace, should show $expectedItems.length',
- async ({ term, includeEmptyNamespace, expectedItems }) => {
+ 'with term=$term, includeEmptyNamespace=$includeEmptyNamespace, and shouldFilterNamespaces=$shouldFilterNamespaces, should show $expectedItems.length',
+ async ({ term, includeEmptyNamespace, shouldFilterNamespaces, expectedItems }) => {
wrapper = createComponent({
includeEmptyNamespace,
emptyNamespaceTitle: EMPTY_NAMESPACE_TITLE,
+ shouldFilterNamespaces,
});
search(term);
@@ -114,6 +125,18 @@ describe('Namespace Select', () => {
);
});
+ describe('when search is typed in', () => {
+ it('emits `search` event', async () => {
+ wrapper = createComponent();
+
+ wrapper.findComponent(GlSearchBoxByType).vm.$emit('input', 'foo');
+
+ await nextTick();
+
+ expect(wrapper.emitted('search')).toEqual([['foo']]);
+ });
+ });
+
describe('with a selected namespace', () => {
const selectedGroupIndex = 1;
const selectedItem = groupNamespaces[selectedGroupIndex];
@@ -121,7 +144,8 @@ describe('Namespace Select', () => {
beforeEach(() => {
wrapper = createComponent();
- findDropdownItems().at(selectedGroupIndex).vm.$emit('click');
+ wrapper.findComponent(GlSearchBoxByType).vm.$emit('input', 'foo');
+ findGroupDropdownItems().at(selectedGroupIndex).vm.$emit('click');
});
it('sets the dropdown text', () => {
@@ -132,6 +156,10 @@ describe('Namespace Select', () => {
const args = [selectedItem];
expect(wrapper.emitted('select')).toEqual([args]);
});
+
+ it('clears search', () => {
+ expect(wrapper.findComponent(GlSearchBoxByType).props('value')).toBe('');
+ });
});
describe('with an empty namespace option', () => {
@@ -166,4 +194,33 @@ describe('Namespace Select', () => {
expect(findDropdownItemsTexts().includes(EMPTY_NAMESPACE_TITLE)).toBe(shouldShow);
});
});
+
+ describe('when `hasNextPageOfGroups` prop is `true`', () => {
+ it('renders `GlIntersectionObserver` and emits `load-more-groups` event when bottom is reached', () => {
+ wrapper = createComponent({ hasNextPageOfGroups: true });
+
+ const intersectionObserver = wrapper.findComponent(GlIntersectionObserver);
+
+ intersectionObserver.vm.$emit('appear');
+
+ expect(intersectionObserver.exists()).toBe(true);
+ expect(wrapper.emitted('load-more-groups')).toEqual([[]]);
+ });
+
+ describe('when `isLoadingMoreGroups` prop is `true`', () => {
+ it('renders a loading icon', () => {
+ wrapper = createComponent({ hasNextPageOfGroups: true, isLoadingMoreGroups: true });
+
+ expect(wrapper.findComponent(GlLoadingIcon).exists()).toBe(true);
+ });
+ });
+ });
+
+ describe('when `isSearchLoading` prop is `true`', () => {
+ it('sets `isLoading` prop to `true`', () => {
+ wrapper = createComponent({ isSearchLoading: true });
+
+ expect(wrapper.findComponent(GlSearchBoxByType).props('isLoading')).toBe(true);
+ });
+ });
});
diff --git a/spec/frontend/vue_shared/components/navigation_tabs_spec.js b/spec/frontend/vue_shared/components/navigation_tabs_spec.js
index 30a89fed12f..b1bec28bffb 100644
--- a/spec/frontend/vue_shared/components/navigation_tabs_spec.js
+++ b/spec/frontend/vue_shared/components/navigation_tabs_spec.js
@@ -44,7 +44,7 @@ describe('navigation tabs component', () => {
});
it('should render tabs', () => {
- expect(wrapper.findAll(GlTab)).toHaveLength(data.length);
+ expect(wrapper.findAllComponents(GlTab)).toHaveLength(data.length);
});
it('should render active tab', () => {
diff --git a/spec/frontend/vue_shared/components/notes/noteable_warning_spec.js b/spec/frontend/vue_shared/components/notes/noteable_warning_spec.js
index 99b65ca6937..17a62ae8a33 100644
--- a/spec/frontend/vue_shared/components/notes/noteable_warning_spec.js
+++ b/spec/frontend/vue_shared/components/notes/noteable_warning_spec.js
@@ -6,10 +6,11 @@ import NoteableWarning from '~/vue_shared/components/notes/noteable_warning.vue'
describe('Issue Warning Component', () => {
let wrapper;
- const findIcon = (w = wrapper) => w.find(GlIcon);
- const findLockedBlock = (w = wrapper) => w.find({ ref: 'locked' });
- const findConfidentialBlock = (w = wrapper) => w.find({ ref: 'confidential' });
- const findLockedAndConfidentialBlock = (w = wrapper) => w.find({ ref: 'lockedAndConfidential' });
+ const findIcon = (w = wrapper) => w.findComponent(GlIcon);
+ const findLockedBlock = (w = wrapper) => w.findComponent({ ref: 'locked' });
+ const findConfidentialBlock = (w = wrapper) => w.findComponent({ ref: 'confidential' });
+ const findLockedAndConfidentialBlock = (w = wrapper) =>
+ w.findComponent({ ref: 'lockedAndConfidential' });
const createComponent = (props) =>
shallowMount(NoteableWarning, {
@@ -73,7 +74,7 @@ describe('Issue Warning Component', () => {
});
it('renders warning icon', () => {
- expect(wrapper.find(GlIcon).exists()).toBe(true);
+ expect(wrapper.findComponent(GlIcon).exists()).toBe(true);
});
it('does not render information about locked noteable', () => {
@@ -99,7 +100,7 @@ describe('Issue Warning Component', () => {
});
it('does not render warning icon', () => {
- expect(wrapper.find(GlIcon).exists()).toBe(false);
+ expect(wrapper.findComponent(GlIcon).exists()).toBe(false);
});
it('does not render information about locked noteable', () => {
diff --git a/spec/frontend/vue_shared/components/notes/placeholder_note_spec.js b/spec/frontend/vue_shared/components/notes/placeholder_note_spec.js
index f951cfd5cd9..b86c8946e96 100644
--- a/spec/frontend/vue_shared/components/notes/placeholder_note_spec.js
+++ b/spec/frontend/vue_shared/components/notes/placeholder_note_spec.js
@@ -14,7 +14,7 @@ const getters = {
describe('Issue placeholder note component', () => {
let wrapper;
- const findNote = () => wrapper.find({ ref: 'note' });
+ const findNote = () => wrapper.findComponent({ ref: 'note' });
const createComponent = (isIndividual = false, propsData = {}) => {
wrapper = shallowMount(IssuePlaceholderNote, {
diff --git a/spec/frontend/vue_shared/components/paginated_table_with_search_and_tabs/paginated_table_with_search_and_tabs_spec.js b/spec/frontend/vue_shared/components/paginated_table_with_search_and_tabs/paginated_table_with_search_and_tabs_spec.js
index 51a936c0509..c0c3c4a9729 100644
--- a/spec/frontend/vue_shared/components/paginated_table_with_search_and_tabs/paginated_table_with_search_and_tabs_spec.js
+++ b/spec/frontend/vue_shared/components/paginated_table_with_search_and_tabs/paginated_table_with_search_and_tabs_spec.js
@@ -92,15 +92,15 @@ describe('AlertManagementEmptyState', () => {
const EmptyState = () => wrapper.find('.empty-state');
const ItemsTable = () => wrapper.find('.gl-table');
- const ErrorAlert = () => wrapper.find(GlAlert);
- const Pagination = () => wrapper.find(GlPagination);
- const Tabs = () => wrapper.find(GlTabs);
+ const ErrorAlert = () => wrapper.findComponent(GlAlert);
+ const Pagination = () => wrapper.findComponent(GlPagination);
+ const Tabs = () => wrapper.findComponent(GlTabs);
const ActionButton = () => wrapper.find('.header-actions > button');
- const Filters = () => wrapper.find(FilteredSearchBar);
- const findPagination = () => wrapper.find(GlPagination);
- const findStatusFilterTabs = () => wrapper.findAll(GlTab);
- const findStatusTabs = () => wrapper.find(GlTabs);
- const findStatusFilterBadge = () => wrapper.findAll(GlBadge);
+ const Filters = () => wrapper.findComponent(FilteredSearchBar);
+ const findPagination = () => wrapper.findComponent(GlPagination);
+ const findStatusFilterTabs = () => wrapper.findAllComponents(GlTab);
+ const findStatusTabs = () => wrapper.findComponent(GlTabs);
+ const findStatusFilterBadge = () => wrapper.findAllComponents(GlBadge);
describe('Snowplow tracking', () => {
beforeEach(() => {
@@ -213,7 +213,7 @@ describe('AlertManagementEmptyState', () => {
});
it('should render pagination', () => {
- expect(wrapper.find(GlPagination).exists()).toBe(true);
+ expect(wrapper.findComponent(GlPagination).exists()).toBe(true);
});
describe('prevPage', () => {
diff --git a/spec/frontend/vue_shared/components/pagination_bar/pagination_bar_spec.js b/spec/frontend/vue_shared/components/pagination_bar/pagination_bar_spec.js
index 08119dee8af..b3be2f8a775 100644
--- a/spec/frontend/vue_shared/components/pagination_bar/pagination_bar_spec.js
+++ b/spec/frontend/vue_shared/components/pagination_bar/pagination_bar_spec.js
@@ -64,7 +64,7 @@ describe('Pagination bar', () => {
},
});
- expect(wrapper.find(GlDropdown).find('button').text()).toMatchInterpolatedText(
+ expect(wrapper.findComponent(GlDropdown).find('button').text()).toMatchInterpolatedText(
`${CURRENT_PAGE_SIZE} items per page`,
);
});
diff --git a/spec/frontend/vue_shared/components/pagination_links_spec.js b/spec/frontend/vue_shared/components/pagination_links_spec.js
index 83f1e2844f9..d444ad7a733 100644
--- a/spec/frontend/vue_shared/components/pagination_links_spec.js
+++ b/spec/frontend/vue_shared/components/pagination_links_spec.js
@@ -41,7 +41,7 @@ describe('Pagination links component', () => {
beforeEach(() => {
createComponent();
- glPagination = wrapper.find(GlPagination);
+ glPagination = wrapper.findComponent(GlPagination);
});
afterEach(() => {
diff --git a/spec/frontend/vue_shared/components/project_avatar_spec.js b/spec/frontend/vue_shared/components/project_avatar_spec.js
index d55f3127a74..af828fbca51 100644
--- a/spec/frontend/vue_shared/components/project_avatar_spec.js
+++ b/spec/frontend/vue_shared/components/project_avatar_spec.js
@@ -42,6 +42,42 @@ describe('ProjectAvatar', () => {
});
});
+ describe('with `projectId` prop', () => {
+ const validatorFunc = ProjectAvatar.props.projectId.validator;
+
+ it('prop validators return true for valid types', () => {
+ expect(validatorFunc(1)).toBe(true);
+ expect(validatorFunc('gid://gitlab/Project/1')).toBe(true);
+ });
+
+ it('prop validators return false for invalid types', () => {
+ expect(validatorFunc('1')).toBe(false);
+ });
+
+ it('renders GlAvatar with `entityId` 0 when `projectId` is not informed', () => {
+ createComponent({ props: { projectId: undefined } });
+
+ const avatar = findGlAvatar();
+ expect(avatar.props('entityId')).toBe(0);
+ });
+
+ it('renders GlAvatar with specified `entityId` when `projectId` is a Number', () => {
+ const mockProjectId = 1;
+ createComponent({ props: { projectId: mockProjectId } });
+
+ const avatar = findGlAvatar();
+ expect(avatar.props('entityId')).toBe(mockProjectId);
+ });
+
+ it('renders GlAvatar with specified `entityId` when `projectId` is a gid String', () => {
+ const mockProjectId = 'gid://gitlab/Project/1';
+ createComponent({ props: { projectId: mockProjectId } });
+
+ const avatar = findGlAvatar();
+ expect(avatar.props('entityId')).toBe(1);
+ });
+ });
+
describe('with `projectAvatarUrl` prop', () => {
it('renders GlAvatar with specified `src` prop', () => {
const mockProjectAvatarUrl = 'https://gitlab.com';
diff --git a/spec/frontend/vue_shared/components/project_selector/project_list_item_spec.js b/spec/frontend/vue_shared/components/project_selector/project_list_item_spec.js
index 397ab2254b9..4e0c318c84e 100644
--- a/spec/frontend/vue_shared/components/project_selector/project_list_item_spec.js
+++ b/spec/frontend/vue_shared/components/project_selector/project_list_item_spec.js
@@ -56,6 +56,7 @@ describe('ProjectListItem component', () => {
expect(avatar.exists()).toBe(true);
expect(avatar.props()).toMatchObject({
+ projectId: project.id,
projectAvatarUrl: '',
projectName: project.name_with_namespace,
});
diff --git a/spec/frontend/vue_shared/components/project_selector/project_selector_spec.js b/spec/frontend/vue_shared/components/project_selector/project_selector_spec.js
index 379e60c1b2d..a0832dd7030 100644
--- a/spec/frontend/vue_shared/components/project_selector/project_selector_spec.js
+++ b/spec/frontend/vue_shared/components/project_selector/project_selector_spec.js
@@ -15,7 +15,7 @@ describe('ProjectSelector component', () => {
let selected = [];
selected = selected.concat(allProjects.slice(0, 3)).concat(allProjects.slice(5, 8));
- const findSearchInput = () => wrapper.find(GlSearchBoxByType).find('input');
+ const findSearchInput = () => wrapper.findComponent(GlSearchBoxByType).find('input');
const findLegendText = () => wrapper.find('[data-testid="legend-text"]').text();
const search = (query) => {
const searchInput = findSearchInput();
@@ -65,14 +65,14 @@ describe('ProjectSelector component', () => {
it(`triggers a "bottomReached" event when user has scrolled to the bottom of the list`, () => {
jest.spyOn(vm, '$emit').mockImplementation(() => {});
- wrapper.find(GlInfiniteScroll).vm.$emit('bottomReached');
+ wrapper.findComponent(GlInfiniteScroll).vm.$emit('bottomReached');
expect(vm.$emit).toHaveBeenCalledWith('bottomReached');
});
it(`triggers a "projectClicked" event when a project is clicked`, () => {
jest.spyOn(vm, '$emit').mockImplementation(() => {});
- wrapper.find(ProjectListItem).vm.$emit('click', head(searchResults));
+ wrapper.findComponent(ProjectListItem).vm.$emit('click', head(searchResults));
expect(vm.$emit).toHaveBeenCalledWith('projectClicked', head(searchResults));
});
diff --git a/spec/frontend/vue_shared/components/registry/code_instruction_spec.js b/spec/frontend/vue_shared/components/registry/code_instruction_spec.js
index 3a2ea263a05..8f19f0ea14d 100644
--- a/spec/frontend/vue_shared/components/registry/code_instruction_spec.js
+++ b/spec/frontend/vue_shared/components/registry/code_instruction_spec.js
@@ -22,7 +22,7 @@ describe('Package code instruction', () => {
});
}
- const findCopyButton = () => wrapper.find(ClipboardButton);
+ const findCopyButton = () => wrapper.findComponent(ClipboardButton);
const findInputElement = () => wrapper.find('[data-testid="instruction-input"]');
const findMultilineInstruction = () => wrapper.find('[data-testid="multiline-instruction"]');
diff --git a/spec/frontend/vue_shared/components/registry/details_row_spec.js b/spec/frontend/vue_shared/components/registry/details_row_spec.js
index 3134e0d3e21..ebc9816f983 100644
--- a/spec/frontend/vue_shared/components/registry/details_row_spec.js
+++ b/spec/frontend/vue_shared/components/registry/details_row_spec.js
@@ -5,7 +5,7 @@ import component from '~/vue_shared/components/registry/details_row.vue';
describe('DetailsRow', () => {
let wrapper;
- const findIcon = () => wrapper.find(GlIcon);
+ const findIcon = () => wrapper.findComponent(GlIcon);
const findDefaultSlot = () => wrapper.find('[data-testid="default-slot"]');
const mountComponent = (props) => {
diff --git a/spec/frontend/vue_shared/components/registry/history_item_spec.js b/spec/frontend/vue_shared/components/registry/history_item_spec.js
index f146f87342f..947520567e6 100644
--- a/spec/frontend/vue_shared/components/registry/history_item_spec.js
+++ b/spec/frontend/vue_shared/components/registry/history_item_spec.js
@@ -27,8 +27,8 @@ describe('History Item', () => {
wrapper = null;
});
- const findTimelineEntry = () => wrapper.find(TimelineEntryItem);
- const findGlIcon = () => wrapper.find(GlIcon);
+ const findTimelineEntry = () => wrapper.findComponent(TimelineEntryItem);
+ const findGlIcon = () => wrapper.findComponent(GlIcon);
const findDefaultSlot = () => wrapper.find('[data-testid="default-slot"]');
const findBodySlot = () => wrapper.find('[data-testid="body-slot"]');
diff --git a/spec/frontend/vue_shared/components/registry/list_item_spec.js b/spec/frontend/vue_shared/components/registry/list_item_spec.js
index 6e9abb2bfb3..b941eb77c32 100644
--- a/spec/frontend/vue_shared/components/registry/list_item_spec.js
+++ b/spec/frontend/vue_shared/components/registry/list_item_spec.js
@@ -13,7 +13,7 @@ describe('list item', () => {
const findRightSecondarySlot = () => wrapper.find('[data-testid="right-secondary"]');
const findRightActionSlot = () => wrapper.find('[data-testid="right-action"]');
const findDetailsSlot = (name) => wrapper.find(`[data-testid="${name}"]`);
- const findToggleDetailsButton = () => wrapper.find(GlButton);
+ const findToggleDetailsButton = () => wrapper.findComponent(GlButton);
const mountComponent = (propsData, slots) => {
wrapper = shallowMount(component, {
diff --git a/spec/frontend/vue_shared/components/registry/metadata_item_spec.js b/spec/frontend/vue_shared/components/registry/metadata_item_spec.js
index e4abdc15fd5..a04e1e237d4 100644
--- a/spec/frontend/vue_shared/components/registry/metadata_item_spec.js
+++ b/spec/frontend/vue_shared/components/registry/metadata_item_spec.js
@@ -24,10 +24,10 @@ describe('Metadata Item', () => {
wrapper = null;
});
- const findIcon = () => wrapper.find(GlIcon);
- const findLink = (w = wrapper) => w.find(GlLink);
+ const findIcon = () => wrapper.findComponent(GlIcon);
+ const findLink = (w = wrapper) => w.findComponent(GlLink);
const findText = () => wrapper.find('[data-testid="metadata-item-text"]');
- const findTooltipOnTruncate = (w = wrapper) => w.find(TooltipOnTruncate);
+ const findTooltipOnTruncate = (w = wrapper) => w.findComponent(TooltipOnTruncate);
const findTextTooltip = () => wrapper.find('[data-testid="text-tooltip-container"]');
describe.each(['xs', 's', 'm', 'l', 'xl'])('size class', (size) => {
diff --git a/spec/frontend/vue_shared/components/registry/registry_search_spec.js b/spec/frontend/vue_shared/components/registry/registry_search_spec.js
index 20716e79a04..70f4693ae81 100644
--- a/spec/frontend/vue_shared/components/registry/registry_search_spec.js
+++ b/spec/frontend/vue_shared/components/registry/registry_search_spec.js
@@ -6,9 +6,9 @@ import component from '~/vue_shared/components/registry/registry_search.vue';
describe('Registry Search', () => {
let wrapper;
- const findPackageListSorting = () => wrapper.find(GlSorting);
- const findSortingItems = () => wrapper.findAll(GlSortingItem);
- const findFilteredSearch = () => wrapper.find(GlFilteredSearch);
+ const findPackageListSorting = () => wrapper.findComponent(GlSorting);
+ const findSortingItems = () => wrapper.findAllComponents(GlSortingItem);
+ const findFilteredSearch = () => wrapper.findComponent(GlFilteredSearch);
const defaultProps = {
filters: [],
diff --git a/spec/frontend/vue_shared/components/registry/title_area_spec.js b/spec/frontend/vue_shared/components/registry/title_area_spec.js
index b62676b35be..efb57ddd310 100644
--- a/spec/frontend/vue_shared/components/registry/title_area_spec.js
+++ b/spec/frontend/vue_shared/components/registry/title_area_spec.js
@@ -199,7 +199,7 @@ describe('title area', () => {
const message = findInfoMessages().at(0);
- expect(message.find(GlLink).attributes('href')).toBe('bar');
+ expect(message.findComponent(GlLink).attributes('href')).toBe('bar');
expect(message.text()).toBe('foo link');
});
diff --git a/spec/frontend/vue_shared/components/rich_timestamp_tooltip_spec.js b/spec/frontend/vue_shared/components/rich_timestamp_tooltip_spec.js
new file mode 100644
index 00000000000..5d96fe27676
--- /dev/null
+++ b/spec/frontend/vue_shared/components/rich_timestamp_tooltip_spec.js
@@ -0,0 +1,41 @@
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+
+import { formatDate } from '~/lib/utils/datetime_utility';
+import RichTimestampTooltip from '~/vue_shared/components/rich_timestamp_tooltip.vue';
+
+describe('RichTimestampTooltip', () => {
+ const currentDate = new Date();
+ const mockRawTimestamp = currentDate.toISOString();
+ const mockTimestamp = formatDate(currentDate);
+ let wrapper;
+
+ const createComponent = ({
+ target = 'some-element',
+ rawTimestamp = mockRawTimestamp,
+ timestampTypeText = 'Created',
+ } = {}) => {
+ wrapper = shallowMountExtended(RichTimestampTooltip, {
+ propsData: {
+ target,
+ rawTimestamp,
+ timestampTypeText,
+ },
+ });
+ };
+
+ beforeEach(() => {
+ createComponent();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('renders the tooltip text header', () => {
+ expect(wrapper.findByTestId('header-text').text()).toBe('Created just now');
+ });
+
+ it('renders the tooltip text body', () => {
+ expect(wrapper.findByTestId('body-text').text()).toBe(mockTimestamp);
+ });
+});
diff --git a/spec/frontend/vue_shared/components/runner_instructions/runner_instructions_modal_spec.js b/spec/frontend/vue_shared/components/runner_instructions/runner_instructions_modal_spec.js
index a38dcd626f4..7c5fc63856a 100644
--- a/spec/frontend/vue_shared/components/runner_instructions/runner_instructions_modal_spec.js
+++ b/spec/frontend/vue_shared/components/runner_instructions/runner_instructions_modal_spec.js
@@ -166,7 +166,7 @@ describe('RunnerInstructionsModal component', () => {
});
it('sets the focus on the default selected platform', () => {
- const findOsxPlatformButton = () => wrapper.find({ ref: 'osx' });
+ const findOsxPlatformButton = () => wrapper.findComponent({ ref: 'osx' });
findOsxPlatformButton().element.focus = jest.fn();
@@ -234,14 +234,14 @@ describe('RunnerInstructionsModal component', () => {
MockResizeObserver.mockResize('xs');
await nextTick();
- expect(findPlatformButtonGroup().attributes('vertical')).toBeTruthy();
+ expect(findPlatformButtonGroup().attributes('vertical')).toEqual('true');
});
it('to a non-xs viewport', async () => {
MockResizeObserver.mockResize('sm');
await nextTick();
- expect(findPlatformButtonGroup().props('vertical')).toBeFalsy();
+ expect(findPlatformButtonGroup().props('vertical')).toBeUndefined();
});
});
});
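The runner instructions hunk above also tightens loose matchers: a rendered boolean attribute is asserted as the exact string 'true' instead of toBeTruthy(), and an absent value with toBeUndefined() instead of toBeFalsy(). A minimal sketch of why, using a hypothetical Toggle component that is not part of this commit:

import { shallowMount } from '@vue/test-utils';

// Hypothetical component: renders a `vertical` attribute only when the prop is set.
const Toggle = {
  name: 'Toggle',
  props: { vertical: { type: Boolean, default: false } },
  render(h) {
    return h('div', { attrs: { vertical: this.vertical ? 'true' : undefined } });
  },
};

it('asserts exact values instead of truthiness', () => {
  const withProp = shallowMount(Toggle, { propsData: { vertical: true } });
  const withoutProp = shallowMount(Toggle);

  // Rendered boolean attributes come back as the string 'true', not a boolean.
  expect(withProp.attributes('vertical')).toEqual('true');
  // An attribute that was never rendered is undefined, so assert that directly.
  expect(withoutProp.attributes('vertical')).toBeUndefined();
});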
diff --git a/spec/frontend/vue_shared/components/security_reports/artifact_downloads/merge_request_artifact_download_spec.js b/spec/frontend/vue_shared/components/security_reports/artifact_downloads/merge_request_artifact_download_spec.js
index 71ebe561def..c5672bc28cc 100644
--- a/spec/frontend/vue_shared/components/security_reports/artifact_downloads/merge_request_artifact_download_spec.js
+++ b/spec/frontend/vue_shared/components/security_reports/artifact_downloads/merge_request_artifact_download_spec.js
@@ -50,7 +50,7 @@ describe('Merge request artifact Download', () => {
return createMockApollo(requestHandlers);
};
- const findDownloadDropdown = () => wrapper.find(SecurityReportDownloadDropdown);
+ const findDownloadDropdown = () => wrapper.findComponent(SecurityReportDownloadDropdown);
afterEach(() => {
wrapper.destroy();
diff --git a/spec/frontend/vue_shared/components/security_reports/help_icon_spec.js b/spec/frontend/vue_shared/components/security_reports/help_icon_spec.js
index ae86106d86e..08d3d5b19d4 100644
--- a/spec/frontend/vue_shared/components/security_reports/help_icon_spec.js
+++ b/spec/frontend/vue_shared/components/security_reports/help_icon_spec.js
@@ -17,9 +17,9 @@ describe('HelpIcon component', () => {
});
};
- const findLink = () => wrapper.find(GlLink);
- const findPopover = () => wrapper.find(GlPopover);
- const findPopoverTarget = () => wrapper.find({ ref: 'discoverProjectSecurity' });
+ const findLink = () => wrapper.findComponent(GlLink);
+ const findPopover = () => wrapper.findComponent(GlPopover);
+ const findPopoverTarget = () => wrapper.findComponent({ ref: 'discoverProjectSecurity' });
afterEach(() => {
wrapper.destroy();
diff --git a/spec/frontend/vue_shared/components/sidebar/issuable_move_dropdown_spec.js b/spec/frontend/vue_shared/components/sidebar/issuable_move_dropdown_spec.js
index f213e37cbc1..9b1316677d7 100644
--- a/spec/frontend/vue_shared/components/sidebar/issuable_move_dropdown_spec.js
+++ b/spec/frontend/vue_shared/components/sidebar/issuable_move_dropdown_spec.js
@@ -173,15 +173,15 @@ describe('IssuableMoveDropdown', () => {
});
describe('template', () => {
- const findDropdownEl = () => wrapper.find(GlDropdown);
+ const findDropdownEl = () => wrapper.findComponent(GlDropdown);
it('renders collapsed state element with icon', () => {
const collapsedEl = wrapper.find('[data-testid="move-collapsed"]');
expect(collapsedEl.exists()).toBe(true);
expect(collapsedEl.attributes('title')).toBe(mockProps.dropdownButtonTitle);
- expect(collapsedEl.find(GlIcon).exists()).toBe(true);
- expect(collapsedEl.find(GlIcon).props('name')).toBe('arrow-right');
+ expect(collapsedEl.findComponent(GlIcon).exists()).toBe(true);
+ expect(collapsedEl.findComponent(GlIcon).props('name')).toBe('arrow-right');
});
describe('gl-dropdown component', () => {
@@ -191,7 +191,7 @@ describe('IssuableMoveDropdown', () => {
});
it('renders gl-dropdown-form component', () => {
- expect(findDropdownEl().find(GlDropdownForm).exists()).toBe(true);
+ expect(findDropdownEl().findComponent(GlDropdownForm).exists()).toBe(true);
});
it('renders header element', () => {
@@ -199,11 +199,11 @@ describe('IssuableMoveDropdown', () => {
expect(headerEl.exists()).toBe(true);
expect(headerEl.find('span').text()).toBe(mockProps.dropdownHeaderTitle);
- expect(headerEl.find(GlButton).props('icon')).toBe('close');
+ expect(headerEl.findComponent(GlButton).props('icon')).toBe('close');
});
it('renders gl-search-box-by-type component', () => {
- const searchEl = findDropdownEl().find(GlSearchBoxByType);
+ const searchEl = findDropdownEl().findComponent(GlSearchBoxByType);
expect(searchEl.exists()).toBe(true);
expect(searchEl.attributes()).toMatchObject({
@@ -221,7 +221,7 @@ describe('IssuableMoveDropdown', () => {
await nextTick();
- expect(findDropdownEl().find(GlLoadingIcon).exists()).toBe(true);
+ expect(findDropdownEl().findComponent(GlLoadingIcon).exists()).toBe(true);
});
it('renders gl-dropdown-item components for available projects', async () => {
@@ -234,7 +234,7 @@ describe('IssuableMoveDropdown', () => {
await nextTick();
- const dropdownItems = wrapper.findAll(GlDropdownItem);
+ const dropdownItems = wrapper.findAllComponents(GlDropdownItem);
expect(dropdownItems).toHaveLength(mockProjects.length);
expect(dropdownItems.at(0).props()).toMatchObject({
@@ -285,7 +285,7 @@ describe('IssuableMoveDropdown', () => {
});
it('renders gl-button within footer', async () => {
- const moveButtonEl = wrapper.find('[data-testid="footer"]').find(GlButton);
+ const moveButtonEl = wrapper.find('[data-testid="footer"]').findComponent(GlButton);
expect(moveButtonEl.text()).toBe('Move');
expect(moveButtonEl.attributes('disabled')).toBe('true');
@@ -299,7 +299,7 @@ describe('IssuableMoveDropdown', () => {
await nextTick();
expect(
- wrapper.find('[data-testid="footer"]').find(GlButton).attributes('disabled'),
+ wrapper.find('[data-testid="footer"]').findComponent(GlButton).attributes('disabled'),
).not.toBeDefined();
});
});
@@ -308,7 +308,7 @@ describe('IssuableMoveDropdown', () => {
it('collapsed state element emits `toggle-collapse` event on component when clicked', () => {
wrapper.find('[data-testid="move-collapsed"]').trigger('click');
- expect(wrapper.emitted('toggle-collapse')).toBeTruthy();
+ expect(wrapper.emitted('toggle-collapse')).toHaveLength(1);
});
it('gl-dropdown component calls `fetchProjects` on `shown` event', () => {
@@ -337,11 +337,11 @@ describe('IssuableMoveDropdown', () => {
it('gl-dropdown component emits `dropdown-close` event on component from `hide` event', async () => {
findDropdownEl().vm.$emit('hide');
- expect(wrapper.emitted('dropdown-close')).toBeTruthy();
+ expect(wrapper.emitted('dropdown-close')).toHaveLength(1);
});
it('close icon in dropdown header closes the dropdown when clicked', () => {
- wrapper.find('[data-testid="header"]').find(GlButton).vm.$emit('click', mockEvent);
+ wrapper.find('[data-testid="header"]').findComponent(GlButton).vm.$emit('click', mockEvent);
expect(wrapper.vm.$refs.dropdown.hide).toHaveBeenCalled();
});
@@ -355,7 +355,7 @@ describe('IssuableMoveDropdown', () => {
await nextTick();
- wrapper.findAll(GlDropdownItem).at(0).vm.$emit('click', mockEvent);
+ wrapper.findAllComponents(GlDropdownItem).at(0).vm.$emit('click', mockEvent);
expect(wrapper.vm.selectedProject).toBe(mockProjects[0]);
});
@@ -369,10 +369,10 @@ describe('IssuableMoveDropdown', () => {
await nextTick();
- wrapper.find('[data-testid="footer"]').find(GlButton).vm.$emit('click');
+ wrapper.find('[data-testid="footer"]').findComponent(GlButton).vm.$emit('click');
expect(wrapper.vm.$refs.dropdown.hide).toHaveBeenCalled();
- expect(wrapper.emitted('move-issuable')).toBeTruthy();
+ expect(wrapper.emitted('move-issuable')).toHaveLength(1);
expect(wrapper.emitted('move-issuable')[0]).toEqual([mockProjects[0]]);
});
});
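The dropdown hunk above likewise replaces wrapper.emitted(...) truthiness checks with toHaveLength(1) plus a payload assertion. A small self-contained sketch of that pattern, using a hypothetical Counter component and assuming Vue Test Utils v1:

import { shallowMount } from '@vue/test-utils';

// Hypothetical component: emits `increment` with the new count on every click.
const Counter = {
  name: 'Counter',
  data: () => ({ count: 0 }),
  render(h) {
    return h(
      'button',
      {
        attrs: { 'data-testid': 'increment-button' },
        on: {
          click: () => {
            this.count += 1;
            this.$emit('increment', this.count);
          },
        },
      },
      'Increment',
    );
  },
};

it('emits a single `increment` event with the new value', async () => {
  const wrapper = shallowMount(Counter);

  await wrapper.find('[data-testid="increment-button"]').trigger('click');

  // Assert the exact number of emissions and the payload, not just truthiness.
  expect(wrapper.emitted('increment')).toHaveLength(1);
  expect(wrapper.emitted('increment')[0]).toEqual([1]);
});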
diff --git a/spec/frontend/vue_shared/components/sidebar/labels_select_vue/dropdown_button_spec.js b/spec/frontend/vue_shared/components/sidebar/labels_select_vue/dropdown_button_spec.js
index c05513a6d5f..c0e5408e1bd 100644
--- a/spec/frontend/vue_shared/components/sidebar/labels_select_vue/dropdown_button_spec.js
+++ b/spec/frontend/vue_shared/components/sidebar/labels_select_vue/dropdown_button_spec.js
@@ -33,9 +33,9 @@ describe('DropdownButton', () => {
wrapper.destroy();
});
- const findDropdownButton = () => wrapper.find(GlButton);
+ const findDropdownButton = () => wrapper.findComponent(GlButton);
const findDropdownText = () => wrapper.find('.dropdown-toggle-text');
- const findDropdownIcon = () => wrapper.find(GlIcon);
+ const findDropdownIcon = () => wrapper.findComponent(GlIcon);
describe('methods', () => {
describe('handleButtonClick', () => {
@@ -61,7 +61,7 @@ describe('DropdownButton', () => {
describe('template', () => {
it('renders component container element', () => {
- expect(wrapper.find(GlButton).element).toBe(wrapper.element);
+ expect(wrapper.findComponent(GlButton).element).toBe(wrapper.element);
});
it('renders default button text element', () => {
diff --git a/spec/frontend/vue_shared/components/sidebar/labels_select_vue/dropdown_contents_create_view_spec.js b/spec/frontend/vue_shared/components/sidebar/labels_select_vue/dropdown_contents_create_view_spec.js
index 0673ffee22b..799e2c1d08e 100644
--- a/spec/frontend/vue_shared/components/sidebar/labels_select_vue/dropdown_contents_create_view_spec.js
+++ b/spec/frontend/vue_shared/components/sidebar/labels_select_vue/dropdown_contents_create_view_spec.js
@@ -127,7 +127,7 @@ describe('DropdownContentsCreateView', () => {
});
it('renders dropdown back button element', () => {
- const backBtnEl = wrapper.find('.dropdown-title').findAll(GlButton).at(0);
+ const backBtnEl = wrapper.find('.dropdown-title').findAllComponents(GlButton).at(0);
expect(backBtnEl.exists()).toBe(true);
expect(backBtnEl.attributes('aria-label')).toBe('Go back');
@@ -142,7 +142,7 @@ describe('DropdownContentsCreateView', () => {
});
it('renders dropdown close button element', () => {
- const closeBtnEl = wrapper.find('.dropdown-title').findAll(GlButton).at(1);
+ const closeBtnEl = wrapper.find('.dropdown-title').findAllComponents(GlButton).at(1);
expect(closeBtnEl.exists()).toBe(true);
expect(closeBtnEl.attributes('aria-label')).toBe('Close');
@@ -150,7 +150,7 @@ describe('DropdownContentsCreateView', () => {
});
it('renders label title input element', () => {
- const titleInputEl = wrapper.find('.dropdown-input').find(GlFormInput);
+ const titleInputEl = wrapper.find('.dropdown-input').findComponent(GlFormInput);
expect(titleInputEl.exists()).toBe(true);
expect(titleInputEl.attributes('placeholder')).toBe('Name new label');
@@ -158,7 +158,7 @@ describe('DropdownContentsCreateView', () => {
});
it('renders color block element for all suggested colors', () => {
- const colorBlocksEl = wrapper.find('.dropdown-content').findAll(GlLink);
+ const colorBlocksEl = wrapper.find('.dropdown-content').findAllComponents(GlLink);
colorBlocksEl.wrappers.forEach((colorBlock, index) => {
expect(colorBlock.attributes('style')).toContain('background-color');
@@ -175,7 +175,7 @@ describe('DropdownContentsCreateView', () => {
await nextTick();
const colorPreviewEl = wrapper.find('.color-input-container > .dropdown-label-color-preview');
- const colorInputEl = wrapper.find('.color-input-container').find(GlFormInput);
+ const colorInputEl = wrapper.find('.color-input-container').findComponent(GlFormInput);
expect(colorPreviewEl.exists()).toBe(true);
expect(colorPreviewEl.attributes('style')).toContain('background-color');
@@ -185,7 +185,7 @@ describe('DropdownContentsCreateView', () => {
});
it('renders create button element', () => {
- const createBtnEl = wrapper.find('.dropdown-actions').findAll(GlButton).at(0);
+ const createBtnEl = wrapper.find('.dropdown-actions').findAllComponents(GlButton).at(0);
expect(createBtnEl.exists()).toBe(true);
expect(createBtnEl.text()).toContain('Create');
@@ -195,14 +195,14 @@ describe('DropdownContentsCreateView', () => {
wrapper.vm.$store.dispatch('requestCreateLabel');
await nextTick();
- const loadingIconEl = wrapper.find('.dropdown-actions').find(GlLoadingIcon);
+ const loadingIconEl = wrapper.find('.dropdown-actions').findComponent(GlLoadingIcon);
expect(loadingIconEl.exists()).toBe(true);
expect(loadingIconEl.isVisible()).toBe(true);
});
it('renders cancel button element', () => {
- const cancelBtnEl = wrapper.find('.dropdown-actions').findAll(GlButton).at(1);
+ const cancelBtnEl = wrapper.find('.dropdown-actions').findAllComponents(GlButton).at(1);
expect(cancelBtnEl.exists()).toBe(true);
expect(cancelBtnEl.text()).toContain('Cancel');
diff --git a/spec/frontend/vue_shared/components/sidebar/labels_select_vue/dropdown_contents_labels_view_spec.js b/spec/frontend/vue_shared/components/sidebar/labels_select_vue/dropdown_contents_labels_view_spec.js
index 00c8e3a814a..cc9b9f393ce 100644
--- a/spec/frontend/vue_shared/components/sidebar/labels_select_vue/dropdown_contents_labels_view_spec.js
+++ b/spec/frontend/vue_shared/components/sidebar/labels_select_vue/dropdown_contents_labels_view_spec.js
@@ -58,7 +58,7 @@ describe('DropdownContentsLabelsView', () => {
const findDropdownContent = () => wrapper.find('[data-testid="dropdown-content"]');
const findDropdownTitle = () => wrapper.find('[data-testid="dropdown-title"]');
const findDropdownFooter = () => wrapper.find('[data-testid="dropdown-footer"]');
- const findLoadingIcon = () => wrapper.find(GlLoadingIcon);
+ const findLoadingIcon = () => wrapper.findComponent(GlLoadingIcon);
describe('computed', () => {
describe('visibleLabels', () => {
@@ -285,7 +285,7 @@ describe('DropdownContentsLabelsView', () => {
describe('template', () => {
it('renders gl-intersection-observer as component root', () => {
- expect(wrapper.find(GlIntersectionObserver).exists()).toBe(true);
+ expect(wrapper.findComponent(GlIntersectionObserver).exists()).toBe(true);
});
it('renders gl-loading-icon component when `labelsFetchInProgress` prop is true', async () => {
@@ -316,20 +316,20 @@ describe('DropdownContentsLabelsView', () => {
});
it('renders dropdown close button element', () => {
- const closeButtonEl = findDropdownTitle().find(GlButton);
+ const closeButtonEl = findDropdownTitle().findComponent(GlButton);
expect(closeButtonEl.exists()).toBe(true);
expect(closeButtonEl.props('icon')).toBe('close');
});
it('renders label search input element', () => {
- const searchInputEl = wrapper.find(GlSearchBoxByType);
+ const searchInputEl = wrapper.findComponent(GlSearchBoxByType);
expect(searchInputEl.exists()).toBe(true);
});
it('renders label elements for all labels', () => {
- expect(wrapper.findAll(LabelItem)).toHaveLength(mockLabels.length);
+ expect(wrapper.findAllComponents(LabelItem)).toHaveLength(mockLabels.length);
});
it('renders label element with `highlight` set to true when value of `currentHighlightItem` is more than -1', async () => {
@@ -340,7 +340,7 @@ describe('DropdownContentsLabelsView', () => {
});
await nextTick();
- const labelItemEl = findDropdownContent().find(LabelItem);
+ const labelItemEl = findDropdownContent().findComponent(LabelItem);
expect(labelItemEl.attributes('highlight')).toBe('true');
});
@@ -373,7 +373,7 @@ describe('DropdownContentsLabelsView', () => {
});
it('renders footer list items', () => {
- const footerLinks = findDropdownFooter().findAll(GlLink);
+ const footerLinks = findDropdownFooter().findAllComponents(GlLink);
const createLabelLink = footerLinks.at(0);
const manageLabelsLink = footerLinks.at(1);
@@ -387,7 +387,7 @@ describe('DropdownContentsLabelsView', () => {
wrapper.vm.$store.state.allowLabelCreate = false;
await nextTick();
- const createLabelLink = findDropdownFooter().findAll(GlLink).at(0);
+ const createLabelLink = findDropdownFooter().findAllComponents(GlLink).at(0);
expect(createLabelLink.text()).not.toBe('Create label');
});
diff --git a/spec/frontend/vue_shared/components/sidebar/labels_select_vue/dropdown_title_spec.js b/spec/frontend/vue_shared/components/sidebar/labels_select_vue/dropdown_title_spec.js
index 84e9f3f41c3..54804f85f81 100644
--- a/spec/frontend/vue_shared/components/sidebar/labels_select_vue/dropdown_title_spec.js
+++ b/spec/frontend/vue_shared/components/sidebar/labels_select_vue/dropdown_title_spec.js
@@ -41,7 +41,7 @@ describe('DropdownTitle', () => {
});
it('renders edit link', () => {
- const editBtnEl = wrapper.find(GlButton);
+ const editBtnEl = wrapper.findComponent(GlButton);
expect(editBtnEl.exists()).toBe(true);
expect(editBtnEl.text()).toBe('Edit');
@@ -53,7 +53,7 @@ describe('DropdownTitle', () => {
});
await nextTick();
- expect(wrapper.find(GlLoadingIcon).isVisible()).toBe(true);
+ expect(wrapper.findComponent(GlLoadingIcon).isVisible()).toBe(true);
});
});
});
diff --git a/spec/frontend/vue_shared/components/sidebar/labels_select_vue/label_item_spec.js b/spec/frontend/vue_shared/components/sidebar/labels_select_vue/label_item_spec.js
index bedb6204088..bb0f1777de6 100644
--- a/spec/frontend/vue_shared/components/sidebar/labels_select_vue/label_item_spec.js
+++ b/spec/frontend/vue_shared/components/sidebar/labels_select_vue/label_item_spec.js
@@ -32,7 +32,7 @@ describe('LabelItem', () => {
describe('template', () => {
it('renders gl-link component', () => {
- expect(wrapper.find(GlLink).exists()).toBe(true);
+ expect(wrapper.findComponent(GlLink).exists()).toBe(true);
});
it('renders component root with class `is-focused` when `highlight` prop is true', () => {
diff --git a/spec/frontend/vue_shared/components/sidebar/labels_select_vue/labels_select_root_spec.js b/spec/frontend/vue_shared/components/sidebar/labels_select_vue/labels_select_root_spec.js
index c150410ff8e..4c7ac6e9a6f 100644
--- a/spec/frontend/vue_shared/components/sidebar/labels_select_vue/labels_select_root_spec.js
+++ b/spec/frontend/vue_shared/components/sidebar/labels_select_vue/labels_select_root_spec.js
@@ -138,13 +138,13 @@ describe('LabelsSelectRoot', () => {
it('renders `dropdown-value-collapsed` component when `allowLabelCreate` prop is `true`', async () => {
createComponent();
await nextTick();
- expect(wrapper.find(DropdownValueCollapsed).exists()).toBe(true);
+ expect(wrapper.findComponent(DropdownValueCollapsed).exists()).toBe(true);
});
it('renders `dropdown-title` component', async () => {
createComponent();
await nextTick();
- expect(wrapper.find(DropdownTitle).exists()).toBe(true);
+ expect(wrapper.findComponent(DropdownTitle).exists()).toBe(true);
});
it('renders `dropdown-value` component', async () => {
@@ -153,7 +153,7 @@ describe('LabelsSelectRoot', () => {
});
await nextTick();
- const valueComp = wrapper.find(DropdownValue);
+ const valueComp = wrapper.findComponent(DropdownValue);
expect(valueComp.exists()).toBe(true);
expect(valueComp.text()).toBe('None');
@@ -163,14 +163,14 @@ describe('LabelsSelectRoot', () => {
createComponent();
wrapper.vm.$store.dispatch('toggleDropdownButton');
await nextTick();
- expect(wrapper.find(DropdownButton).exists()).toBe(true);
+ expect(wrapper.findComponent(DropdownButton).exists()).toBe(true);
});
it('renders `dropdown-contents` component when `showDropdownButton` & `showDropdownContents` prop is `true`', async () => {
createComponent();
wrapper.vm.$store.dispatch('toggleDropdownContents');
await nextTick();
- expect(wrapper.find(DropdownContents).exists()).toBe(true);
+ expect(wrapper.findComponent(DropdownContents).exists()).toBe(true);
});
describe('sets content direction based on viewport', () => {
@@ -187,7 +187,7 @@ describe('LabelsSelectRoot', () => {
wrapper.vm.setContentIsOnViewport(wrapper.vm.$store.state);
await nextTick();
- expect(wrapper.find(DropdownContents).props('renderOnTop')).toBe(true);
+ expect(wrapper.findComponent(DropdownContents).props('renderOnTop')).toBe(true);
});
it('does not set direction when inside of viewport', async () => {
@@ -195,7 +195,7 @@ describe('LabelsSelectRoot', () => {
wrapper.vm.setContentIsOnViewport(wrapper.vm.$store.state);
await nextTick();
- expect(wrapper.find(DropdownContents).props('renderOnTop')).toBe(false);
+ expect(wrapper.findComponent(DropdownContents).props('renderOnTop')).toBe(false);
});
},
);
diff --git a/spec/frontend/vue_shared/components/sidebar/labels_select_widget/labels_select_root_spec.js b/spec/frontend/vue_shared/components/sidebar/labels_select_widget/labels_select_root_spec.js
index 1b27a294b90..cad401e0013 100644
--- a/spec/frontend/vue_shared/components/sidebar/labels_select_widget/labels_select_root_spec.js
+++ b/spec/frontend/vue_shared/components/sidebar/labels_select_widget/labels_select_root_spec.js
@@ -131,6 +131,7 @@ describe('LabelsSelectRoot', () => {
expect(findDropdownValue().exists()).toBe(true);
expect(findDropdownValue().props('selectedLabels')).toEqual([
{
+ __typename: 'Label',
color: '#330066',
description: null,
id: 'gid://gitlab/ProjectLabel/1',
diff --git a/spec/frontend/vue_shared/components/sidebar/todo_button_spec.js b/spec/frontend/vue_shared/components/sidebar/todo_button_spec.js
index de3e1ccfb03..01958a144ed 100644
--- a/spec/frontend/vue_shared/components/sidebar/todo_button_spec.js
+++ b/spec/frontend/vue_shared/components/sidebar/todo_button_spec.js
@@ -30,19 +30,19 @@ describe('Todo Button', () => {
it('renders GlButton', () => {
createComponent();
- expect(wrapper.find(GlButton).exists()).toBe(true);
+ expect(wrapper.findComponent(GlButton).exists()).toBe(true);
});
it('emits click event when clicked', () => {
createComponent({}, mount);
- wrapper.find(GlButton).trigger('click');
+ wrapper.findComponent(GlButton).trigger('click');
- expect(wrapper.emitted().click).toBeTruthy();
+ expect(wrapper.emitted().click).toHaveLength(1);
});
it('calls dispatchDocumentEvent to update global To-Do counter correctly', () => {
createComponent({}, mount);
- wrapper.find(GlButton).trigger('click');
+ wrapper.findComponent(GlButton).trigger('click');
const dispatchedEvent = dispatchEventSpy.mock.calls[0][0];
expect(dispatchEventSpy).toHaveBeenCalledTimes(1);
@@ -57,12 +57,12 @@ describe('Todo Button', () => {
`('sets correct label when isTodo is $isTodo', ({ label, isTodo }) => {
createComponent({ isTodo });
- expect(wrapper.find(GlButton).text()).toBe(label);
+ expect(wrapper.findComponent(GlButton).text()).toBe(label);
});
it('binds additional props to GlButton', () => {
createComponent({ loading: true });
- expect(wrapper.find(GlButton).props('loading')).toBe(true);
+ expect(wrapper.findComponent(GlButton).props('loading')).toBe(true);
});
});
diff --git a/spec/frontend/vue_shared/components/source_editor_spec.js b/spec/frontend/vue_shared/components/source_editor_spec.js
index dca4d60e23c..ca5b990bc29 100644
--- a/spec/frontend/vue_shared/components/source_editor_spec.js
+++ b/spec/frontend/vue_shared/components/source_editor_spec.js
@@ -3,6 +3,7 @@ import { nextTick } from 'vue';
import { EDITOR_READY_EVENT } from '~/editor/constants';
import Editor from '~/editor/source_editor';
import SourceEditor from '~/vue_shared/components/source_editor.vue';
+import * as helpers from 'jest/editor/helpers';
jest.mock('~/editor/source_editor');
@@ -13,6 +14,7 @@ describe('Source Editor component', () => {
const value = 'Lorem ipsum dolor sit amet, consectetur adipiscing elit.';
const fileName = 'lorem.txt';
const fileGlobalId = 'snippet_777';
+ const useSpy = jest.fn();
const createInstanceMock = jest.fn().mockImplementation(() => {
mockInstance = {
onDidChangeModelContent: jest.fn(),
@@ -20,6 +22,7 @@ describe('Source Editor component', () => {
getValue: jest.fn(),
setValue: jest.fn(),
dispose: jest.fn(),
+ use: useSpy,
};
return mockInstance;
});
@@ -77,16 +80,33 @@ describe('Source Editor component', () => {
});
it('initialises Source Editor instance', () => {
- const el = wrapper.find({ ref: 'editor' }).element;
+ const el = wrapper.findComponent({ ref: 'editor' }).element;
expect(createInstanceMock).toHaveBeenCalledWith({
el,
blobPath: fileName,
blobGlobalId: fileGlobalId,
blobContent: value,
- extensions: null,
});
});
+ it.each`
+ description | extensions | toBeCalled
+ ${'no extension when `undefined` is'} | ${undefined} | ${false}
+ ${'no extension when {} is'} | ${{}} | ${false}
+ ${'no extension when [] is'} | ${[]} | ${false}
+ ${'single extension'} | ${{ definition: helpers.SEClassExtension }} | ${true}
+ ${'single extension with options'} | ${{ definition: helpers.SEWithSetupExt, setupOptions: { foo: 'bar' } }} | ${true}
+ ${'multiple extensions'} | ${[{ definition: helpers.SEClassExtension }, { definition: helpers.SEWithSetupExt }]} | ${true}
+ ${'multiple extensions with options'} | ${[{ definition: helpers.SEClassExtension }, { definition: helpers.SEWithSetupExt, setupOptions: { foo: 'bar' } }]} | ${true}
+ `('installs $description passed as a prop', ({ extensions, toBeCalled }) => {
+ createComponent({ extensions });
+ if (toBeCalled) {
+ expect(useSpy).toHaveBeenCalledWith(extensions);
+ } else {
+ expect(useSpy).not.toHaveBeenCalled();
+ }
+ });
+
it('reacts to the changes in fileName', () => {
const newFileName = 'ipsum.txt';
@@ -112,7 +132,7 @@ describe('Source Editor component', () => {
});
it('emits EDITOR_READY_EVENT event when the Source Editor is ready', async () => {
- const el = wrapper.find({ ref: 'editor' }).element;
+ const el = wrapper.findComponent({ ref: 'editor' }).element;
expect(wrapper.emitted()[EDITOR_READY_EVENT]).toBeUndefined();
await el.dispatchEvent(new Event(EDITOR_READY_EVENT));
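The extensions test added above is table-driven with it.each, so every input shape gets its own named case. A minimal sketch of that style follows; the normalizeExtensions helper is hypothetical and not part of the editor code:

// Hypothetical helper: flattens the extensions prop into a list of definitions.
const normalizeExtensions = (extensions) =>
  [].concat(extensions || []).filter((entry) => entry && entry.definition);

it.each`
  description       | extensions                    | expectedCount
  ${'undefined'}    | ${undefined}                  | ${0}
  ${'empty object'} | ${{}}                         | ${0}
  ${'empty array'}  | ${[]}                         | ${0}
  ${'one entry'}    | ${[{ definition: class {} }]} | ${1}
`('yields $expectedCount definition(s) for $description input', ({ extensions, expectedCount }) => {
  expect(normalizeExtensions(extensions)).toHaveLength(expectedCount);
});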
diff --git a/spec/frontend/vue_shared/components/source_viewer/components/chunk_line_spec.js b/spec/frontend/vue_shared/components/source_viewer/components/chunk_line_spec.js
index eb2eec92534..fd3ff9ce892 100644
--- a/spec/frontend/vue_shared/components/source_viewer/components/chunk_line_spec.js
+++ b/spec/frontend/vue_shared/components/source_viewer/components/chunk_line_spec.js
@@ -1,4 +1,3 @@
-import { GlLink } from '@gitlab/ui';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import ChunkLine from '~/vue_shared/components/source_viewer/components/chunk_line.vue';
import {
@@ -11,16 +10,26 @@ const DEFAULT_PROPS = {
number: 2,
content: '// Line content',
language: 'javascript',
+ blamePath: 'blame/file.js',
};
describe('Chunk Line component', () => {
let wrapper;
+ const fileLineBlame = true;
const createComponent = (props = {}) => {
- wrapper = shallowMountExtended(ChunkLine, { propsData: { ...DEFAULT_PROPS, ...props } });
+ wrapper = shallowMountExtended(ChunkLine, {
+ propsData: { ...DEFAULT_PROPS, ...props },
+ provide: {
+ glFeatures: {
+ fileLineBlame,
+ },
+ },
+ });
};
- const findLink = () => wrapper.findComponent(GlLink);
+ const findLineLink = () => wrapper.find('.file-line-num');
+ const findBlameLink = () => wrapper.find('.file-line-blame');
const findContent = () => wrapper.findByTestId('content');
const findWrappedBidiChars = () => wrapper.findAllByTestId('bidi-wrapper');
@@ -47,14 +56,22 @@ describe('Chunk Line component', () => {
});
});
+ it('renders a blame link', () => {
+ expect(findBlameLink().attributes()).toMatchObject({
+ href: `${DEFAULT_PROPS.blamePath}#L${DEFAULT_PROPS.number}`,
+ });
+
+ expect(findBlameLink().text()).toBe('');
+ });
+
it('renders a line number', () => {
- expect(findLink().attributes()).toMatchObject({
+ expect(findLineLink().attributes()).toMatchObject({
'data-line-number': `${DEFAULT_PROPS.number}`,
- to: `#L${DEFAULT_PROPS.number}`,
+ href: `#L${DEFAULT_PROPS.number}`,
id: `L${DEFAULT_PROPS.number}`,
});
- expect(findLink().text()).toBe(DEFAULT_PROPS.number.toString());
+ expect(findLineLink().text()).toBe(DEFAULT_PROPS.number.toString());
});
it('renders content', () => {
diff --git a/spec/frontend/vue_shared/components/source_viewer/components/chunk_spec.js b/spec/frontend/vue_shared/components/source_viewer/components/chunk_spec.js
index 42c4f2eacb8..8dc3348acfa 100644
--- a/spec/frontend/vue_shared/components/source_viewer/components/chunk_spec.js
+++ b/spec/frontend/vue_shared/components/source_viewer/components/chunk_spec.js
@@ -10,6 +10,7 @@ const DEFAULT_PROPS = {
startingFrom: 140,
totalLines: 50,
language: 'javascript',
+ blamePath: 'blame/file.js',
};
describe('Chunk component', () => {
@@ -76,6 +77,7 @@ describe('Chunk component', () => {
number: DEFAULT_PROPS.startingFrom + 1,
content: splitContent[0],
language: DEFAULT_PROPS.language,
+ blamePath: DEFAULT_PROPS.blamePath,
});
});
});
diff --git a/spec/frontend/vue_shared/components/source_viewer/plugins/link_dependencies_spec.js b/spec/frontend/vue_shared/components/source_viewer/plugins/link_dependencies_spec.js
index 3036ce43888..375b1307616 100644
--- a/spec/frontend/vue_shared/components/source_viewer/plugins/link_dependencies_spec.js
+++ b/spec/frontend/vue_shared/components/source_viewer/plugins/link_dependencies_spec.js
@@ -1,8 +1,10 @@
import packageJsonLinker from '~/vue_shared/components/source_viewer/plugins/utils/package_json_linker';
+import gemspecLinker from '~/vue_shared/components/source_viewer/plugins/utils/gemspec_linker';
import linkDependencies from '~/vue_shared/components/source_viewer/plugins/link_dependencies';
-import { PACKAGE_JSON_FILE_TYPE, PACKAGE_JSON_CONTENT } from './mock_data';
+import { PACKAGE_JSON_FILE_TYPE, PACKAGE_JSON_CONTENT, GEMSPEC_FILE_TYPE } from './mock_data';
jest.mock('~/vue_shared/components/source_viewer/plugins/utils/package_json_linker');
+jest.mock('~/vue_shared/components/source_viewer/plugins/utils/gemspec_linker');
describe('Highlight.js plugin for linking dependencies', () => {
const hljsResultMock = { value: 'test' };
@@ -11,4 +13,9 @@ describe('Highlight.js plugin for linking dependencies', () => {
linkDependencies(hljsResultMock, PACKAGE_JSON_FILE_TYPE, PACKAGE_JSON_CONTENT);
expect(packageJsonLinker).toHaveBeenCalled();
});
+
+ it('calls gemspecLinker for gemspec file types', () => {
+ linkDependencies(hljsResultMock, GEMSPEC_FILE_TYPE);
+ expect(gemspecLinker).toHaveBeenCalled();
+ });
});
diff --git a/spec/frontend/vue_shared/components/source_viewer/plugins/mock_data.js b/spec/frontend/vue_shared/components/source_viewer/plugins/mock_data.js
index 75659770e2c..aa874c9c081 100644
--- a/spec/frontend/vue_shared/components/source_viewer/plugins/mock_data.js
+++ b/spec/frontend/vue_shared/components/source_viewer/plugins/mock_data.js
@@ -1,2 +1,4 @@
export const PACKAGE_JSON_FILE_TYPE = 'package_json';
export const PACKAGE_JSON_CONTENT = '{ "dependencies": { "@babel/core": "^7.18.5" } }';
+
+export const GEMSPEC_FILE_TYPE = 'gemspec';
diff --git a/spec/frontend/vue_shared/components/source_viewer/plugins/utils/dependency_linker_util_spec.js b/spec/frontend/vue_shared/components/source_viewer/plugins/utils/dependency_linker_util_spec.js
index ee200747af9..8079d5ad99a 100644
--- a/spec/frontend/vue_shared/components/source_viewer/plugins/utils/dependency_linker_util_spec.js
+++ b/spec/frontend/vue_shared/components/source_viewer/plugins/utils/dependency_linker_util_spec.js
@@ -14,10 +14,11 @@ describe('createLink', () => {
it('escapes the user-controlled content', () => {
const unescapedXSS = '<script>XSS</script>';
- const escapedXSS = '&amp;lt;script&amp;gt;XSS&amp;lt;/script&amp;gt;';
+ const escapedPackageName = '&lt;script&gt;XSS&lt;/script&gt;';
+ const escapedHref = '&amp;lt;script&amp;gt;XSS&amp;lt;/script&amp;gt;';
const href = `http://test.com/${unescapedXSS}`;
const innerText = `testing${unescapedXSS}`;
- const result = `<a href="http://test.com/${escapedXSS}" rel="nofollow noreferrer noopener">testing${escapedXSS}</a>`;
+ const result = `<a href="http://test.com/${escapedHref}" rel="nofollow noreferrer noopener">testing${escapedPackageName}</a>`;
expect(createLink(href, innerText)).toBe(result);
});
diff --git a/spec/frontend/vue_shared/components/source_viewer/plugins/utils/gemspec_linker_spec.js b/spec/frontend/vue_shared/components/source_viewer/plugins/utils/gemspec_linker_spec.js
new file mode 100644
index 00000000000..3f74bfa117f
--- /dev/null
+++ b/spec/frontend/vue_shared/components/source_viewer/plugins/utils/gemspec_linker_spec.js
@@ -0,0 +1,14 @@
+import gemspecLinker from '~/vue_shared/components/source_viewer/plugins/utils/gemspec_linker';
+
+describe('Highlight.js plugin for linking gemspec dependencies', () => {
+ it('mutates the input value by wrapping dependency names in anchors', () => {
+ const inputValue =
+ 's.add_dependency(<span class="hljs-string">&#x27;rugged&#x27;</span>, <span class="hljs-string">&#x27;~&gt; 0.24.0&#x27;</span>)';
+ const outputValue =
+ 's.add_dependency(<span class="hljs-string linked">&#x27;<a href="https://rubygems.org/gems/rugged" rel="nofollow noreferrer noopener">rugged</a>&#x27;</span>, <span class="hljs-string">&#x27;~&gt; 0.24.0&#x27;</span>)';
+ const hljsResultMock = { value: inputValue };
+
+ const output = gemspecLinker(hljsResultMock);
+ expect(output).toBe(outputValue);
+ });
+});
diff --git a/spec/frontend/vue_shared/components/source_viewer/source_viewer_spec.js b/spec/frontend/vue_shared/components/source_viewer/source_viewer_spec.js
index 2c03b7aa7d3..4fbc907a813 100644
--- a/spec/frontend/vue_shared/components/source_viewer/source_viewer_spec.js
+++ b/spec/frontend/vue_shared/components/source_viewer/source_viewer_spec.js
@@ -40,8 +40,9 @@ describe('Source Viewer component', () => {
const chunk2 = generateContent('// Some source code 2', 70);
const content = chunk1 + chunk2;
const path = 'some/path.js';
+ const blamePath = 'some/blame/path.js';
const fileType = 'javascript';
- const DEFAULT_BLOB_DATA = { language, rawTextBlob: content, path, fileType };
+ const DEFAULT_BLOB_DATA = { language, rawTextBlob: content, path, blamePath, fileType };
const highlightedContent = `<span data-testid='test-highlighted' id='LC1'>${content}</span><span id='LC2'></span>`;
const createComponent = async (blob = {}) => {
diff --git a/spec/frontend/vue_shared/components/split_button_spec.js b/spec/frontend/vue_shared/components/split_button_spec.js
index 4965969bc3e..6b869db4058 100644
--- a/spec/frontend/vue_shared/components/split_button_spec.js
+++ b/spec/frontend/vue_shared/components/split_button_spec.js
@@ -26,8 +26,9 @@ describe('SplitButton', () => {
});
};
- const findDropdown = () => wrapper.find(GlDropdown);
- const findDropdownItem = (index = 0) => findDropdown().findAll(GlDropdownItem).at(index);
+ const findDropdown = () => wrapper.findComponent(GlDropdown);
+ const findDropdownItem = (index = 0) =>
+ findDropdown().findAllComponents(GlDropdownItem).at(index);
const selectItem = async (index) => {
findDropdownItem(index).vm.$emit('click');
diff --git a/spec/frontend/vue_shared/components/table_pagination_spec.js b/spec/frontend/vue_shared/components/table_pagination_spec.js
index ed23a47c328..99de26ce2ae 100644
--- a/spec/frontend/vue_shared/components/table_pagination_spec.js
+++ b/spec/frontend/vue_shared/components/table_pagination_spec.js
@@ -50,7 +50,7 @@ describe('Pagination component', () => {
change: spy,
});
- expect(wrapper.find(GlPagination).exists()).toBe(true);
+ expect(wrapper.findComponent(GlPagination).exists()).toBe(true);
});
it('renders if there is a prev page', () => {
@@ -66,7 +66,7 @@ describe('Pagination component', () => {
change: spy,
});
- expect(wrapper.find(GlPagination).exists()).toBe(true);
+ expect(wrapper.findComponent(GlPagination).exists()).toBe(true);
});
});
@@ -83,7 +83,7 @@ describe('Pagination component', () => {
},
change: spy,
});
- wrapper.find(GlPagination).vm.$emit('input', 3);
+ wrapper.findComponent(GlPagination).vm.$emit('input', 3);
expect(spy).toHaveBeenCalledWith(3);
});
});
diff --git a/spec/frontend/vue_shared/components/tooltip_on_truncate_spec.js b/spec/frontend/vue_shared/components/tooltip_on_truncate_spec.js
index 9e7e5c1263f..ca1f7996ad6 100644
--- a/spec/frontend/vue_shared/components/tooltip_on_truncate_spec.js
+++ b/spec/frontend/vue_shared/components/tooltip_on_truncate_spec.js
@@ -68,7 +68,7 @@ describe('TooltipOnTruncate component', () => {
},
);
- wrapper = parent.find(WrappedTooltipOnTruncate);
+ wrapper = parent.findComponent(WrappedTooltipOnTruncate);
};
const getTooltipValue = () => getBinding(wrapper.element, 'gl-tooltip')?.value;
diff --git a/spec/frontend/vue_shared/components/upload_dropzone/upload_dropzone_spec.js b/spec/frontend/vue_shared/components/upload_dropzone/upload_dropzone_spec.js
index 21e9b401215..a063a5591e3 100644
--- a/spec/frontend/vue_shared/components/upload_dropzone/upload_dropzone_spec.js
+++ b/spec/frontend/vue_shared/components/upload_dropzone/upload_dropzone_spec.js
@@ -14,7 +14,7 @@ describe('Upload dropzone component', () => {
const findDropzoneCard = () => wrapper.find('.upload-dropzone-card');
const findDropzoneArea = () => wrapper.find('[data-testid="dropzone-area"]');
- const findIcon = () => wrapper.find(GlIcon);
+ const findIcon = () => wrapper.findComponent(GlIcon);
const findUploadText = () => wrapper.find('[data-testid="upload-text"]').text();
const findFileInput = () => wrapper.find('input[type="file"]');
diff --git a/spec/frontend/vue_shared/components/user_access_role_badge_spec.js b/spec/frontend/vue_shared/components/user_access_role_badge_spec.js
index 7f25f7c08e7..cea6fcac8c8 100644
--- a/spec/frontend/vue_shared/components/user_access_role_badge_spec.js
+++ b/spec/frontend/vue_shared/components/user_access_role_badge_spec.js
@@ -18,7 +18,7 @@ describe('UserAccessRoleBadge', () => {
},
});
- const badge = wrapper.find(GlBadge);
+ const badge = wrapper.findComponent(GlBadge);
expect(badge.exists()).toBe(true);
expect(badge.html()).toContain('test slot content');
diff --git a/spec/frontend/vue_shared/components/user_avatar/user_avatar_image_new_spec.js b/spec/frontend/vue_shared/components/user_avatar/user_avatar_image_new_spec.js
index 5e05b54cb8c..f87737ca86a 100644
--- a/spec/frontend/vue_shared/components/user_avatar/user_avatar_image_new_spec.js
+++ b/spec/frontend/vue_shared/components/user_avatar/user_avatar_image_new_spec.js
@@ -18,6 +18,8 @@ const PROVIDED_PROPS = {
describe('User Avatar Image Component', () => {
let wrapper;
+ const findAvatar = () => wrapper.findComponent(GlAvatar);
+
afterEach(() => {
wrapper.destroy();
});
@@ -28,21 +30,14 @@ describe('User Avatar Image Component', () => {
propsData: {
...PROVIDED_PROPS,
},
- provide: {
- glFeatures: {
- glAvatarForAllUserAvatars: true,
- },
- },
});
});
it('should render `GlAvatar` and provide correct properties to it', () => {
- const avatar = wrapper.findComponent(GlAvatar);
-
- expect(avatar.attributes('data-src')).toBe(
+ expect(findAvatar().attributes('data-src')).toBe(
`${PROVIDED_PROPS.imgSrc}?width=${PROVIDED_PROPS.size}`,
);
- expect(avatar.props()).toMatchObject({
+ expect(findAvatar().props()).toMatchObject({
src: `${PROVIDED_PROPS.imgSrc}?width=${PROVIDED_PROPS.size}`,
alt: PROVIDED_PROPS.imgAlt,
size: PROVIDED_PROPS.size,
@@ -63,23 +58,28 @@ describe('User Avatar Image Component', () => {
...PROVIDED_PROPS,
lazy: true,
},
- provide: {
- glFeatures: {
- glAvatarForAllUserAvatars: true,
- },
- },
});
});
it('should add lazy attributes', () => {
- const avatar = wrapper.findComponent(GlAvatar);
-
- expect(avatar.classes()).toContain('lazy');
- expect(avatar.attributes()).toMatchObject({
+ expect(findAvatar().classes()).toContain('lazy');
+ expect(findAvatar().attributes()).toMatchObject({
src: placeholderImage,
'data-src': `${PROVIDED_PROPS.imgSrc}?width=${PROVIDED_PROPS.size}`,
});
});
+
+ it('should use maximum number when size is provided as an object', () => {
+ wrapper = shallowMount(UserAvatarImage, {
+ propsData: {
+ ...PROVIDED_PROPS,
+ size: { default: 16, md: 64, lg: 24 },
+ lazy: true,
+ },
+ });
+
+ expect(findAvatar().attributes('data-src')).toBe(`${PROVIDED_PROPS.imgSrc}?width=${64}`);
+ });
});
describe('Initialization without src', () => {
@@ -89,18 +89,11 @@ describe('User Avatar Image Component', () => {
...PROVIDED_PROPS,
imgSrc: null,
},
- provide: {
- glFeatures: {
- glAvatarForAllUserAvatars: true,
- },
- },
});
});
it('should have default avatar image', () => {
- const avatar = wrapper.findComponent(GlAvatar);
-
- expect(avatar.props('src')).toBe(`${defaultAvatarUrl}?width=${PROVIDED_PROPS.size}`);
+ expect(findAvatar().props('src')).toBe(`${defaultAvatarUrl}?width=${PROVIDED_PROPS.size}`);
});
});
diff --git a/spec/frontend/vue_shared/components/user_avatar/user_avatar_image_spec.js b/spec/frontend/vue_shared/components/user_avatar/user_avatar_image_spec.js
index 75d2a936b34..6ad2ef226c2 100644
--- a/spec/frontend/vue_shared/components/user_avatar/user_avatar_image_spec.js
+++ b/spec/frontend/vue_shared/components/user_avatar/user_avatar_image_spec.js
@@ -15,47 +15,37 @@ const PROVIDED_PROPS = {
describe('User Avatar Image Component', () => {
let wrapper;
- afterEach(() => {
- wrapper.destroy();
- });
-
- describe('when `glAvatarForAllUserAvatars` feature flag enabled', () => {
- beforeEach(() => {
- wrapper = shallowMount(UserAvatarImage, {
- propsData: {
- ...PROVIDED_PROPS,
+ const createWrapper = (props = {}, { glAvatarForAllUserAvatars } = {}) => {
+ wrapper = shallowMount(UserAvatarImage, {
+ propsData: {
+ ...PROVIDED_PROPS,
+ ...props,
+ },
+ provide: {
+ glFeatures: {
+ glAvatarForAllUserAvatars,
},
- provide: {
- glFeatures: {
- glAvatarForAllUserAvatars: true,
- },
- },
- });
+ },
});
+ };
- it('should render `UserAvatarImageNew` component', () => {
- expect(wrapper.findComponent(UserAvatarImageNew).exists()).toBe(true);
- expect(wrapper.findComponent(UserAvatarImageOld).exists()).toBe(false);
- });
+ afterEach(() => {
+ wrapper.destroy();
});
- describe('when `glAvatarForAllUserAvatars` feature flag disabled', () => {
- beforeEach(() => {
- wrapper = shallowMount(UserAvatarImage, {
- propsData: {
- ...PROVIDED_PROPS,
- },
- provide: {
- glFeatures: {
- glAvatarForAllUserAvatars: false,
- },
- },
+ describe.each([
+ [false, true, true],
+ [true, false, true],
+ [true, true, true],
+ [false, false, false],
+ ])(
+ 'when glAvatarForAllUserAvatars=%s and enforceGlAvatar=%s',
+ (glAvatarForAllUserAvatars, enforceGlAvatar, isUsingNewVersion) => {
+ it(`will render ${isUsingNewVersion ? 'new' : 'old'} version`, () => {
+ createWrapper({ enforceGlAvatar }, { glAvatarForAllUserAvatars });
+ expect(wrapper.findComponent(UserAvatarImageNew).exists()).toBe(isUsingNewVersion);
+ expect(wrapper.findComponent(UserAvatarImageOld).exists()).toBe(!isUsingNewVersion);
});
- });
-
- it('should render `UserAvatarImageOld` component', () => {
- expect(wrapper.findComponent(UserAvatarImageNew).exists()).toBe(false);
- expect(wrapper.findComponent(UserAvatarImageOld).exists()).toBe(true);
- });
- });
+ },
+ );
});
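The avatar specs above fold the per-flag describe blocks into a describe.each matrix driven by a small truth table. A self-contained sketch of that pattern follows; the flag name, the Avatar component, and its new/old children are hypothetical stand-ins rather than the commit's code:

import { shallowMount } from '@vue/test-utils';

// Hypothetical stand-ins for the two implementations toggled by a feature flag.
const NewAvatar = { name: 'NewAvatar', render: (h) => h('div') };
const OldAvatar = { name: 'OldAvatar', render: (h) => h('div') };

const Avatar = {
  name: 'Avatar',
  props: { enforceNew: { type: Boolean, default: false } },
  inject: ['glFeatures'],
  render(h) {
    return this.glFeatures.newAvatarFlag || this.enforceNew ? h(NewAvatar) : h(OldAvatar);
  },
};

describe.each([
  [false, true, true],
  [true, false, true],
  [true, true, true],
  [false, false, false],
])('when newAvatarFlag=%s and enforceNew=%s', (newAvatarFlag, enforceNew, rendersNew) => {
  it(`renders the ${rendersNew ? 'new' : 'old'} implementation`, () => {
    const wrapper = shallowMount(Avatar, {
      propsData: { enforceNew },
      provide: { glFeatures: { newAvatarFlag } },
    });

    // shallowMount stubs the children, but findComponent still matches the stubs.
    expect(wrapper.findComponent(NewAvatar).exists()).toBe(rendersNew);
    expect(wrapper.findComponent(OldAvatar).exists()).toBe(!rendersNew);
  });
});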
diff --git a/spec/frontend/vue_shared/components/user_avatar/user_avatar_link_new_spec.js b/spec/frontend/vue_shared/components/user_avatar/user_avatar_link_new_spec.js
index 5ba80b31b99..f485a14cfea 100644
--- a/spec/frontend/vue_shared/components/user_avatar/user_avatar_link_new_spec.js
+++ b/spec/frontend/vue_shared/components/user_avatar/user_avatar_link_new_spec.js
@@ -54,6 +54,7 @@ describe('User Avatar Link Component', () => {
size: defaultProps.imgSize,
tooltipPlacement: defaultProps.tooltipPlacement,
tooltipText: '',
+ enforceGlAvatar: false,
});
});
diff --git a/spec/frontend/vue_shared/components/user_avatar/user_avatar_link_old_spec.js b/spec/frontend/vue_shared/components/user_avatar/user_avatar_link_old_spec.js
index 2d513c46e77..cf7a1025dba 100644
--- a/spec/frontend/vue_shared/components/user_avatar/user_avatar_link_old_spec.js
+++ b/spec/frontend/vue_shared/components/user_avatar/user_avatar_link_old_spec.js
@@ -54,6 +54,7 @@ describe('User Avatar Link Component', () => {
size: defaultProps.imgSize,
tooltipPlacement: defaultProps.tooltipPlacement,
tooltipText: '',
+ enforceGlAvatar: false,
});
});
diff --git a/spec/frontend/vue_shared/components/user_avatar/user_avatar_link_spec.js b/spec/frontend/vue_shared/components/user_avatar/user_avatar_link_spec.js
index b36b83d1fea..fd3f59008ec 100644
--- a/spec/frontend/vue_shared/components/user_avatar/user_avatar_link_spec.js
+++ b/spec/frontend/vue_shared/components/user_avatar/user_avatar_link_spec.js
@@ -15,47 +15,37 @@ const PROVIDED_PROPS = {
describe('User Avatar Link Component', () => {
let wrapper;
- afterEach(() => {
- wrapper.destroy();
- });
-
- describe('when `glAvatarForAllUserAvatars` feature flag enabled', () => {
- beforeEach(() => {
- wrapper = shallowMount(UserAvatarLink, {
- propsData: {
- ...PROVIDED_PROPS,
+ const createWrapper = (props = {}, { glAvatarForAllUserAvatars } = {}) => {
+ wrapper = shallowMount(UserAvatarLink, {
+ propsData: {
+ ...PROVIDED_PROPS,
+ ...props,
+ },
+ provide: {
+ glFeatures: {
+ glAvatarForAllUserAvatars,
},
- provide: {
- glFeatures: {
- glAvatarForAllUserAvatars: true,
- },
- },
- });
+ },
});
+ };
- it('should render `UserAvatarLinkNew` component', () => {
- expect(wrapper.findComponent(UserAvatarLinkNew).exists()).toBe(true);
- expect(wrapper.findComponent(UserAvatarLinkOld).exists()).toBe(false);
- });
+ afterEach(() => {
+ wrapper.destroy();
});
- describe('when `glAvatarForAllUserAvatars` feature flag disabled', () => {
- beforeEach(() => {
- wrapper = shallowMount(UserAvatarLink, {
- propsData: {
- ...PROVIDED_PROPS,
- },
- provide: {
- glFeatures: {
- glAvatarForAllUserAvatars: false,
- },
- },
+ describe.each([
+ [false, true, true],
+ [true, false, true],
+ [true, true, true],
+ [false, false, false],
+ ])(
+ 'when glAvatarForAllUserAvatars=%s and enforceGlAvatar=%s',
+ (glAvatarForAllUserAvatars, enforceGlAvatar, isUsingNewVersion) => {
+ it(`will render ${isUsingNewVersion ? 'new' : 'old'} version`, () => {
+ createWrapper({ enforceGlAvatar }, { glAvatarForAllUserAvatars });
+ expect(wrapper.findComponent(UserAvatarLinkNew).exists()).toBe(isUsingNewVersion);
+ expect(wrapper.findComponent(UserAvatarLinkOld).exists()).toBe(!isUsingNewVersion);
});
- });
-
- it('should render `UserAvatarLinkOld` component', () => {
- expect(wrapper.findComponent(UserAvatarLinkNew).exists()).toBe(false);
- expect(wrapper.findComponent(UserAvatarLinkOld).exists()).toBe(true);
- });
- });
+ },
+ );
});
diff --git a/spec/frontend/vue_shared/components/user_avatar/user_avatar_list_spec.js b/spec/frontend/vue_shared/components/user_avatar/user_avatar_list_spec.js
index 20ff0848cff..b9accbf0373 100644
--- a/spec/frontend/vue_shared/components/user_avatar/user_avatar_list_spec.js
+++ b/spec/frontend/vue_shared/components/user_avatar/user_avatar_list_spec.js
@@ -38,7 +38,7 @@ describe('UserAvatarList', () => {
};
const clickButton = () => {
- const button = wrapper.find(GlButton);
+ const button = wrapper.findComponent(GlButton);
button.vm.$emit('click');
};
@@ -79,7 +79,7 @@ describe('UserAvatarList', () => {
const items = createList(20);
factory({ propsData: { items } });
- const links = wrapper.findAll(UserAvatarLink);
+ const links = wrapper.findAllComponents(UserAvatarLink);
const linkProps = links.wrappers.map((x) => x.props());
expect(linkProps).toEqual(
@@ -105,7 +105,7 @@ describe('UserAvatarList', () => {
it('renders all avatars if length is <= breakpoint', () => {
factory();
- const links = wrapper.findAll(UserAvatarLink);
+ const links = wrapper.findAllComponents(UserAvatarLink);
expect(links.length).toEqual(props.items.length);
});
@@ -113,7 +113,7 @@ describe('UserAvatarList', () => {
it('does not show button', () => {
factory();
- expect(wrapper.find(GlButton).exists()).toBe(false);
+ expect(wrapper.findComponent(GlButton).exists()).toBe(false);
});
});
@@ -126,7 +126,7 @@ describe('UserAvatarList', () => {
it('renders avatars up to breakpoint', () => {
factory();
- const links = wrapper.findAll(UserAvatarLink);
+ const links = wrapper.findAllComponents(UserAvatarLink);
expect(links.length).toEqual(TEST_BREAKPOINT);
});
@@ -138,7 +138,7 @@ describe('UserAvatarList', () => {
});
it('renders all avatars', () => {
- const links = wrapper.findAll(UserAvatarLink);
+ const links = wrapper.findAllComponents(UserAvatarLink);
expect(links.length).toEqual(props.items.length);
});
@@ -147,7 +147,7 @@ describe('UserAvatarList', () => {
clickButton();
await nextTick();
- const links = wrapper.findAll(UserAvatarLink);
+ const links = wrapper.findAllComponents(UserAvatarLink);
expect(links.length).toEqual(TEST_BREAKPOINT);
});
diff --git a/spec/frontend/vue_shared/components/user_popover/user_popover_spec.js b/spec/frontend/vue_shared/components/user_popover/user_popover_spec.js
index 9550368eefc..b7ce3e47cef 100644
--- a/spec/frontend/vue_shared/components/user_popover/user_popover_spec.js
+++ b/spec/frontend/vue_shared/components/user_popover/user_popover_spec.js
@@ -6,6 +6,7 @@ import UserPopover from '~/vue_shared/components/user_popover/user_popover.vue';
import axios from '~/lib/utils/axios_utils';
import createFlash from '~/flash';
import { followUser, unfollowUser } from '~/api/user_api';
+import { mockTracking } from 'helpers/tracking_helper';
jest.mock('~/flash');
jest.mock('~/api/user_api', () => ({
@@ -51,6 +52,18 @@ describe('User Popover Component', () => {
const findUserLocalTime = () => wrapper.findByTestId('user-popover-local-time');
const findToggleFollowButton = () => wrapper.findByTestId('toggle-follow-button');
+ const itTracksToggleFollowButtonClick = (expectedLabel) => {
+ it('tracks click', async () => {
+ const trackingSpy = mockTracking(undefined, wrapper.element, jest.spyOn);
+
+ await findToggleFollowButton().trigger('click');
+
+ expect(trackingSpy).toHaveBeenCalledWith(undefined, 'click_button', {
+ label: expectedLabel,
+ });
+ });
+ };
+
const createWrapper = (props = {}) => {
wrapper = mountExtended(UserPopover, {
propsData: {
@@ -75,7 +88,7 @@ describe('User Popover Component', () => {
},
});
- expect(wrapper.find(GlSkeletonLoader).exists()).toBe(true);
+ expect(wrapper.findComponent(GlSkeletonLoader).exists()).toBe(true);
});
});
@@ -89,7 +102,7 @@ describe('User Popover Component', () => {
it('shows icon for location', () => {
createWrapper();
- const iconEl = wrapper.find(GlIcon);
+ const iconEl = wrapper.findComponent(GlIcon);
expect(iconEl.props('name')).toEqual('location');
});
@@ -102,8 +115,8 @@ describe('User Popover Component', () => {
});
describe('job data', () => {
- const findWorkInformation = () => wrapper.find({ ref: 'workInformation' });
- const findBio = () => wrapper.find({ ref: 'bio' });
+ const findWorkInformation = () => wrapper.findComponent({ ref: 'workInformation' });
+ const findBio = () => wrapper.findComponent({ ref: 'bio' });
const bio = 'My super interesting bio';
it('should show only bio if work information is not available', () => {
@@ -159,7 +172,7 @@ describe('User Popover Component', () => {
createWrapper({ user });
expect(
- wrapper.findAll(GlIcon).filter((icon) => icon.props('name') === 'profile').length,
+ wrapper.findAllComponents(GlIcon).filter((icon) => icon.props('name') === 'profile').length,
).toEqual(1);
});
@@ -172,7 +185,7 @@ describe('User Popover Component', () => {
createWrapper({ user });
expect(
- wrapper.findAll(GlIcon).filter((icon) => icon.props('name') === 'work').length,
+ wrapper.findAllComponents(GlIcon).filter((icon) => icon.props('name') === 'work').length,
).toEqual(1);
});
});
@@ -338,9 +351,11 @@ describe('User Popover Component', () => {
await axios.waitForAll();
expect(wrapper.emitted().follow.length).toBe(1);
- expect(wrapper.emitted().unfollow).toBeFalsy();
+ expect(wrapper.emitted().unfollow).toBeUndefined();
});
+ itTracksToggleFollowButtonClick('follow_from_user_popover');
+
describe('when an error occurs', () => {
beforeEach(() => {
followUser.mockRejectedValue({});
@@ -361,8 +376,8 @@ describe('User Popover Component', () => {
it('emits no events', async () => {
await axios.waitForAll();
- expect(wrapper.emitted().follow).toBe(undefined);
- expect(wrapper.emitted().unfollow).toBe(undefined);
+ expect(wrapper.emitted().follow).toBeUndefined();
+ expect(wrapper.emitted().unfollow).toBeUndefined();
});
});
});
@@ -388,6 +403,8 @@ describe('User Popover Component', () => {
expect(wrapper.emitted().unfollow.length).toBe(1);
});
+ itTracksToggleFollowButtonClick('unfollow_from_user_popover');
+
describe('when an error occurs', () => {
beforeEach(async () => {
unfollowUser.mockRejectedValue({});
@@ -406,8 +423,8 @@ describe('User Popover Component', () => {
});
it('emits no events', () => {
- expect(wrapper.emitted().follow).toBe(undefined);
- expect(wrapper.emitted().unfollow).toBe(undefined);
+ expect(wrapper.emitted().follow).toBeUndefined();
+ expect(wrapper.emitted().unfollow).toBeUndefined();
});
});
});
diff --git a/spec/frontend/vue_shared/components/user_select_spec.js b/spec/frontend/vue_shared/components/user_select_spec.js
index ec9128d5e38..4188adc72a1 100644
--- a/spec/frontend/vue_shared/components/user_select_spec.js
+++ b/spec/frontend/vue_shared/components/user_select_spec.js
@@ -9,6 +9,7 @@ import searchUsersQuery from '~/graphql_shared/queries/users_search.query.graphq
import searchUsersQueryOnMR from '~/graphql_shared/queries/users_search_with_mr_permissions.graphql';
import { IssuableType } from '~/issues/constants';
import { DEFAULT_DEBOUNCE_AND_THROTTLE_MS } from '~/lib/utils/constants';
+import SidebarParticipant from '~/sidebar/components/assignees/sidebar_participant.vue';
import getIssueParticipantsQuery from '~/vue_shared/components/sidebar/queries/get_issue_participants.query.graphql';
import UserSelect from '~/vue_shared/components/user_select/user_select.vue';
import {
@@ -16,6 +17,8 @@ import {
searchResponseOnMR,
projectMembersResponse,
participantsQueryResponse,
+ mockUser1,
+ mockUser2,
} from 'jest/sidebar/mock_data';
const assignee = {
@@ -45,9 +48,14 @@ describe('User select dropdown', () => {
const findSearchField = () => wrapper.findComponent(GlSearchBoxByType);
const findParticipantsLoading = () => wrapper.find('[data-testid="loading-participants"]');
const findSelectedParticipants = () => wrapper.findAll('[data-testid="selected-participant"]');
+ const findSelectedParticipantByIndex = (index) =>
+ findSelectedParticipants().at(index).findComponent(SidebarParticipant);
const findUnselectedParticipants = () =>
wrapper.findAll('[data-testid="unselected-participant"]');
+ const findUnselectedParticipantByIndex = (index) =>
+ findUnselectedParticipants().at(index).findComponent(SidebarParticipant);
const findCurrentUser = () => wrapper.findAll('[data-testid="current-user"]');
+ const findIssuableAuthor = () => wrapper.findAll('[data-testid="issuable-author"]');
const findUnassignLink = () => wrapper.find('[data-testid="unassign"]');
const findEmptySearchResults = () => wrapper.find('[data-testid="empty-results"]');
@@ -136,6 +144,93 @@ describe('User select dropdown', () => {
expect(findCurrentUser().exists()).toBe(true);
});
+ it('does not render current user if user is not logged in', async () => {
+ createComponent({
+ props: {
+ currentUser: {},
+ },
+ });
+ await waitForPromises();
+
+ expect(findCurrentUser().exists()).toBe(false);
+ });
+
+ it('does not render issuable author if author is not passed as a prop', async () => {
+ createComponent();
+ await waitForPromises();
+
+ expect(findIssuableAuthor().exists()).toBe(false);
+ });
+
+ describe('when issuable author is passed as a prop', () => {
+ it('moves issuable author to the top of the assigned list, if author is assigned', async () => {
+ createComponent({
+ props: {
+ value: [assignee, mockUser2],
+ issuableAuthor: mockUser2,
+ },
+ });
+ await waitForPromises();
+
+ expect(findSelectedParticipantByIndex(0).props('user')).toEqual(mockUser2);
+ });
+
+ it('moves issuable author to the top of the assigned list after the current user, if both author and current user are assigned', async () => {
+ const currentUser = mockUser1;
+ const issuableAuthor = mockUser2;
+
+ createComponent({
+ props: {
+ value: [assignee, issuableAuthor, currentUser],
+ issuableAuthor,
+ currentUser,
+ },
+ });
+ await waitForPromises();
+
+ expect(findSelectedParticipantByIndex(0).props('user')).toEqual(currentUser);
+ expect(findSelectedParticipantByIndex(1).props('user')).toEqual(issuableAuthor);
+ });
+
+ it('moves issuable author to the top of the unassigned list, if author is an unassigned project member', async () => {
+ createComponent({
+ props: {
+ issuableAuthor: mockUser2,
+ },
+ });
+ await waitForPromises();
+
+ expect(findUnselectedParticipantByIndex(0).props('user')).toEqual(mockUser2);
+ });
+
+ it('moves issuable author to the top of the unassigned list after the current user, if both author and current user are unassigned project members', async () => {
+ const currentUser = mockUser2;
+ const issuableAuthor = mockUser1;
+
+ createComponent({
+ props: {
+ issuableAuthor,
+ currentUser,
+ },
+ });
+ await waitForPromises();
+
+ expect(findUnselectedParticipantByIndex(0).props('user')).toEqual(currentUser);
+ expect(findUnselectedParticipantByIndex(1).props('user')).toMatchObject(issuableAuthor);
+ });
+
+ it('displays author in a designated position if author is not assigned and not a project member', async () => {
+ createComponent({
+ props: {
+ issuableAuthor: assignee,
+ },
+ });
+ await waitForPromises();
+
+ expect(findIssuableAuthor().exists()).toBe(true);
+ });
+ });
+
it('displays correct amount of selected users', async () => {
createComponent({
props: {
diff --git a/spec/frontend/vue_shared/components/web_ide_link_spec.js b/spec/frontend/vue_shared/components/web_ide_link_spec.js
index 040461f6be4..a0b868d1d52 100644
--- a/spec/frontend/vue_shared/components/web_ide_link_spec.js
+++ b/spec/frontend/vue_shared/components/web_ide_link_spec.js
@@ -3,7 +3,7 @@ import { nextTick } from 'vue';
import ActionsButton from '~/vue_shared/components/actions_button.vue';
import LocalStorageSync from '~/vue_shared/components/local_storage_sync.vue';
-import WebIdeLink from '~/vue_shared/components/web_ide_link.vue';
+import WebIdeLink, { i18n } from '~/vue_shared/components/web_ide_link.vue';
import ConfirmForkModal from '~/vue_shared/components/confirm_fork_modal.vue';
import { stubComponent } from 'helpers/stub_component';
@@ -37,8 +37,8 @@ const ACTION_EDIT_CONFIRM_FORK = {
const ACTION_WEB_IDE = {
href: TEST_WEB_IDE_URL,
key: 'webide',
- secondaryText: 'Quickly and easily edit multiple files in your project.',
- tooltip: '',
+ secondaryText: i18n.webIdeText,
+ tooltip: i18n.webIdeTooltip,
text: 'Web IDE',
attrs: {
'data-qa-selector': 'web_ide_button',
@@ -108,8 +108,8 @@ describe('Web IDE link component', () => {
wrapper.destroy();
});
- const findActionsButton = () => wrapper.find(ActionsButton);
- const findLocalStorageSync = () => wrapper.find(LocalStorageSync);
+ const findActionsButton = () => wrapper.findComponent(ActionsButton);
+ const findLocalStorageSync = () => wrapper.findComponent(LocalStorageSync);
const findModal = () => wrapper.findComponent(GlModal);
const findForkConfirmModal = () => wrapper.findComponent(ConfirmForkModal);
diff --git a/spec/frontend/vue_shared/issuable/create/components/issuable_create_root_spec.js b/spec/frontend/vue_shared/issuable/create/components/issuable_create_root_spec.js
index 81362edaf37..7b0f0f7e344 100644
--- a/spec/frontend/vue_shared/issuable/create/components/issuable_create_root_spec.js
+++ b/spec/frontend/vue_shared/issuable/create/components/issuable_create_root_spec.js
@@ -51,11 +51,11 @@ describe('IssuableCreateRoot', () => {
});
it('renders issuable-form component', () => {
- expect(wrapper.find(IssuableForm).exists()).toBe(true);
+ expect(wrapper.findComponent(IssuableForm).exists()).toBe(true);
});
it('renders contents for slot "actions" within issuable-form component', () => {
- const buttonEl = wrapper.find(IssuableForm).find('button.js-issuable-save');
+ const buttonEl = wrapper.findComponent(IssuableForm).find('button.js-issuable-save');
expect(buttonEl.exists()).toBe(true);
expect(buttonEl.text()).toBe('Submit issuable');
diff --git a/spec/frontend/vue_shared/issuable/create/components/issuable_form_spec.js b/spec/frontend/vue_shared/issuable/create/components/issuable_form_spec.js
index cbfd05e7903..f98e7a678f4 100644
--- a/spec/frontend/vue_shared/issuable/create/components/issuable_form_spec.js
+++ b/spec/frontend/vue_shared/issuable/create/components/issuable_form_spec.js
@@ -65,9 +65,9 @@ describe('IssuableForm', () => {
expect(titleFieldEl.exists()).toBe(true);
expect(titleFieldEl.find('label').text()).toBe('Title');
- expect(titleFieldEl.find(GlFormInput).exists()).toBe(true);
- expect(titleFieldEl.find(GlFormInput).attributes('placeholder')).toBe('Title');
- expect(titleFieldEl.find(GlFormInput).attributes('autofocus')).toBe('true');
+ expect(titleFieldEl.findComponent(GlFormInput).exists()).toBe(true);
+ expect(titleFieldEl.findComponent(GlFormInput).attributes('placeholder')).toBe('Title');
+ expect(titleFieldEl.findComponent(GlFormInput).attributes('autofocus')).toBe('true');
});
it('renders issuable description input field', () => {
@@ -75,8 +75,8 @@ describe('IssuableForm', () => {
expect(descriptionFieldEl.exists()).toBe(true);
expect(descriptionFieldEl.find('label').text()).toBe('Description');
- expect(descriptionFieldEl.find(MarkdownField).exists()).toBe(true);
- expect(descriptionFieldEl.find(MarkdownField).props()).toMatchObject({
+ expect(descriptionFieldEl.findComponent(MarkdownField).exists()).toBe(true);
+ expect(descriptionFieldEl.findComponent(MarkdownField).props()).toMatchObject({
markdownPreviewPath: wrapper.vm.descriptionPreviewPath,
markdownDocsPath: wrapper.vm.descriptionHelpPath,
addSpacingClasses: false,
@@ -94,8 +94,8 @@ describe('IssuableForm', () => {
expect(labelsSelectEl.exists()).toBe(true);
expect(labelsSelectEl.find('label').text()).toBe('Labels');
- expect(labelsSelectEl.find(LabelsSelect).exists()).toBe(true);
- expect(labelsSelectEl.find(LabelsSelect).props()).toMatchObject({
+ expect(labelsSelectEl.findComponent(LabelsSelect).exists()).toBe(true);
+ expect(labelsSelectEl.findComponent(LabelsSelect).props()).toMatchObject({
allowLabelEdit: true,
allowLabelCreate: true,
allowMultiselect: true,
diff --git a/spec/frontend/vue_shared/issuable/list/components/issuable_item_spec.js b/spec/frontend/vue_shared/issuable/list/components/issuable_item_spec.js
index 80f14dffd08..f55d3156581 100644
--- a/spec/frontend/vue_shared/issuable/list/components/issuable_item_spec.js
+++ b/spec/frontend/vue_shared/issuable/list/components/issuable_item_spec.js
@@ -3,6 +3,7 @@ import { nextTick } from 'vue';
import { useFakeDate } from 'helpers/fake_date';
import { shallowMountExtended as shallowMount } from 'helpers/vue_test_utils_helper';
import IssuableItem from '~/vue_shared/issuable/list/components/issuable_item.vue';
+import WorkItemTypeIcon from '~/work_items/components/work_item_type_icon.vue';
import IssuableAssignees from '~/issuable/components/issue_assignees.vue';
import { mockIssuable, mockRegularLabel } from '../mock_data';
@@ -13,6 +14,7 @@ const createComponent = ({
issuable = mockIssuable,
showCheckbox = true,
slots = {},
+ showWorkItemTypeIcon = false,
} = {}) =>
shallowMount(IssuableItem, {
propsData: {
@@ -21,6 +23,7 @@ const createComponent = ({
issuable,
showDiscussions: true,
showCheckbox,
+ showWorkItemTypeIcon,
},
slots,
stubs: {
@@ -40,6 +43,7 @@ describe('IssuableItem', () => {
let wrapper;
const findTimestampWrapper = () => wrapper.find('[data-testid="issuable-timestamp"]');
+ const findWorkItemTypeIcon = () => wrapper.findComponent(WorkItemTypeIcon);
beforeEach(() => {
gon.gitlab_url = MOCK_GITLAB_URL;
@@ -273,9 +277,9 @@ describe('IssuableItem', () => {
const titleEl = wrapper.find('[data-testid="issuable-title"]');
expect(titleEl.exists()).toBe(true);
- expect(titleEl.find(GlLink).attributes('href')).toBe(expectedHref);
- expect(titleEl.find(GlLink).attributes('target')).toBe(expectedTarget);
- expect(titleEl.find(GlLink).text()).toBe(mockIssuable.title);
+ expect(titleEl.findComponent(GlLink).attributes('href')).toBe(expectedHref);
+ expect(titleEl.findComponent(GlLink).attributes('target')).toBe(expectedTarget);
+ expect(titleEl.findComponent(GlLink).text()).toBe(mockIssuable.title);
},
);
@@ -286,8 +290,8 @@ describe('IssuableItem', () => {
await nextTick();
- expect(wrapper.find(GlFormCheckbox).exists()).toBe(true);
- expect(wrapper.find(GlFormCheckbox).attributes('checked')).not.toBeDefined();
+ expect(wrapper.findComponent(GlFormCheckbox).exists()).toBe(true);
+ expect(wrapper.findComponent(GlFormCheckbox).attributes('checked')).not.toBeDefined();
wrapper.setProps({
checked: true,
@@ -295,7 +299,7 @@ describe('IssuableItem', () => {
await nextTick();
- expect(wrapper.find(GlFormCheckbox).attributes('checked')).toBe('true');
+ expect(wrapper.findComponent(GlFormCheckbox).attributes('checked')).toBe('true');
});
it('renders issuable title with `target` set as "_blank" when issuable.webUrl is external', async () => {
@@ -308,9 +312,9 @@ describe('IssuableItem', () => {
await nextTick();
- expect(wrapper.find('[data-testid="issuable-title"]').find(GlLink).attributes('target')).toBe(
- '_blank',
- );
+ expect(
+ wrapper.find('[data-testid="issuable-title"]').findComponent(GlLink).attributes('target'),
+ ).toBe('_blank');
});
it('renders issuable confidential icon when issuable is confidential', async () => {
@@ -323,7 +327,7 @@ describe('IssuableItem', () => {
await nextTick();
- const confidentialEl = wrapper.find('[data-testid="issuable-title"]').find(GlIcon);
+ const confidentialEl = wrapper.find('[data-testid="issuable-title"]').findComponent(GlIcon);
expect(confidentialEl.exists()).toBe(true);
expect(confidentialEl.props('name')).toBe('eye-slash');
@@ -349,11 +353,23 @@ describe('IssuableItem', () => {
wrapper = createComponent();
const taskStatus = wrapper.find('[data-testid="task-status"]');
- const expected = `${mockIssuable.taskCompletionStatus.completedCount} of ${mockIssuable.taskCompletionStatus.count} tasks completed`;
+ const expected = `${mockIssuable.taskCompletionStatus.completedCount} of ${mockIssuable.taskCompletionStatus.count} checklist items completed`;
expect(taskStatus.text()).toBe(expected);
});
+ it('does not render work item type icon by default', () => {
+ wrapper = createComponent();
+
+ expect(findWorkItemTypeIcon().exists()).toBe(false);
+ });
+
+ it('renders work item type icon when prop is passed', () => {
+ wrapper = createComponent({ showWorkItemTypeIcon: true });
+
+ expect(findWorkItemTypeIcon().props('workItemType')).toBe(mockIssuable.type);
+ });
+
it('renders issuable reference', () => {
wrapper = createComponent();
@@ -440,7 +456,7 @@ describe('IssuableItem', () => {
it('renders gl-label component for each label present within `issuable` prop', () => {
wrapper = createComponent();
- const labelsEl = wrapper.findAll(GlLabel);
+ const labelsEl = wrapper.findAllComponents(GlLabel);
expect(labelsEl.exists()).toBe(true);
expect(labelsEl).toHaveLength(mockLabels.length);
@@ -476,18 +492,18 @@ describe('IssuableItem', () => {
const discussionsEl = wrapper.find('[data-testid="issuable-discussions"]');
expect(discussionsEl.exists()).toBe(true);
- expect(discussionsEl.find(GlLink).attributes()).toMatchObject({
+ expect(discussionsEl.findComponent(GlLink).attributes()).toMatchObject({
title: 'Comments',
href: `${mockIssuable.webUrl}#notes`,
});
- expect(discussionsEl.find(GlIcon).props('name')).toBe('comments');
- expect(discussionsEl.find(GlLink).text()).toContain('2');
+ expect(discussionsEl.findComponent(GlIcon).props('name')).toBe('comments');
+ expect(discussionsEl.findComponent(GlLink).text()).toContain('2');
});
it('renders issuable-assignees component', () => {
wrapper = createComponent();
- const assigneesEl = wrapper.find(IssuableAssignees);
+ const assigneesEl = wrapper.findComponent(IssuableAssignees);
expect(assigneesEl.exists()).toBe(true);
expect(assigneesEl.props()).toMatchObject({
diff --git a/spec/frontend/vue_shared/issuable/list/components/issuable_list_root_spec.js b/spec/frontend/vue_shared/issuable/list/components/issuable_list_root_spec.js
index 50e79dbe589..0c53f599d55 100644
--- a/spec/frontend/vue_shared/issuable/list/components/issuable_list_root_spec.js
+++ b/spec/frontend/vue_shared/issuable/list/components/issuable_list_root_spec.js
@@ -359,7 +359,7 @@ describe('IssuableListRoot', () => {
findIssuableTabs().vm.$emit('click');
- expect(wrapper.emitted('click-tab')).toBeTruthy();
+ expect(wrapper.emitted('click-tab')).toHaveLength(1);
});
it('sets all issuables as checked when filtered-search-bar component emits `checked-input` event', () => {
@@ -369,7 +369,7 @@ describe('IssuableListRoot', () => {
searchEl.vm.$emit('checked-input', true);
- expect(searchEl.emitted('checked-input')).toBeTruthy();
+ expect(searchEl.emitted('checked-input')).toHaveLength(1);
expect(searchEl.emitted('checked-input').length).toBe(1);
expect(wrapper.vm.checkedIssuables[mockIssuables[0].iid]).toEqual({
@@ -384,9 +384,9 @@ describe('IssuableListRoot', () => {
const searchEl = findFilteredSearchBar();
searchEl.vm.$emit('onFilter');
- expect(wrapper.emitted('filter')).toBeTruthy();
+ expect(wrapper.emitted('filter')).toHaveLength(1);
searchEl.vm.$emit('onSort');
- expect(wrapper.emitted('sort')).toBeTruthy();
+ expect(wrapper.emitted('sort')).toHaveLength(1);
});
it('sets an issuable as checked when issuable-item component emits `checked-input` event', () => {
@@ -396,7 +396,7 @@ describe('IssuableListRoot', () => {
issuableItem.vm.$emit('checked-input', true);
- expect(issuableItem.emitted('checked-input')).toBeTruthy();
+ expect(issuableItem.emitted('checked-input')).toHaveLength(1);
expect(issuableItem.emitted('checked-input').length).toBe(1);
expect(wrapper.vm.checkedIssuables[mockIssuables[0].iid]).toEqual({
@@ -425,7 +425,7 @@ describe('IssuableListRoot', () => {
wrapper = createComponent({ data, props: { showPaginationControls: true } });
findGlPagination().vm.$emit('input');
- expect(wrapper.emitted('page-change')).toBeTruthy();
+ expect(wrapper.emitted('page-change')).toHaveLength(1);
});
it.each`
diff --git a/spec/frontend/vue_shared/issuable/list/mock_data.js b/spec/frontend/vue_shared/issuable/list/mock_data.js
index 8640f4a2cd5..b67bd0f42fe 100644
--- a/spec/frontend/vue_shared/issuable/list/mock_data.js
+++ b/spec/frontend/vue_shared/issuable/list/mock_data.js
@@ -57,6 +57,7 @@ export const mockIssuable = {
count: 2,
completedCount: 1,
},
+ type: 'issue',
};
export const mockIssuables = [
diff --git a/spec/frontend/vue_shared/issuable/show/components/issuable_body_spec.js b/spec/frontend/vue_shared/issuable/show/components/issuable_body_spec.js
index 7c582360637..39a76a51191 100644
--- a/spec/frontend/vue_shared/issuable/show/components/issuable_body_spec.js
+++ b/spec/frontend/vue_shared/issuable/show/components/issuable_body_spec.js
@@ -154,7 +154,7 @@ describe('IssuableBody', () => {
describe('template', () => {
it('renders issuable-title component', () => {
- const titleEl = wrapper.find(IssuableTitle);
+ const titleEl = wrapper.findComponent(IssuableTitle);
expect(titleEl.exists()).toBe(true);
expect(titleEl.props()).toMatchObject({
@@ -165,7 +165,7 @@ describe('IssuableBody', () => {
});
it('renders issuable-description component', () => {
- const descriptionEl = wrapper.find(IssuableDescription);
+ const descriptionEl = wrapper.findComponent(IssuableDescription);
expect(descriptionEl.exists()).toBe(true);
expect(descriptionEl.props('issuable')).toEqual(issuableBodyProps.issuable);
@@ -184,7 +184,7 @@ describe('IssuableBody', () => {
await nextTick();
- const editFormEl = wrapper.find(IssuableEditForm);
+ const editFormEl = wrapper.findComponent(IssuableEditForm);
expect(editFormEl.exists()).toBe(true);
expect(editFormEl.props()).toMatchObject({
issuable: issuableBodyProps.issuable,
@@ -198,7 +198,7 @@ describe('IssuableBody', () => {
describe('events', () => {
it('component emits `edit-issuable` event bubbled via issuable-title', () => {
- const issuableTitle = wrapper.find(IssuableTitle);
+ const issuableTitle = wrapper.findComponent(IssuableTitle);
issuableTitle.vm.$emit('edit-issuable');
@@ -223,7 +223,7 @@ describe('IssuableBody', () => {
await nextTick();
- const issuableEditForm = wrapper.find(IssuableEditForm);
+ const issuableEditForm = wrapper.findComponent(IssuableEditForm);
issuableEditForm.vm.$emit(eventName, eventObj, issuableMeta);
diff --git a/spec/frontend/vue_shared/issuable/show/components/issuable_edit_form_spec.js b/spec/frontend/vue_shared/issuable/show/components/issuable_edit_form_spec.js
index d3e484cf913..d843da4da5b 100644
--- a/spec/frontend/vue_shared/issuable/show/components/issuable_edit_form_spec.js
+++ b/spec/frontend/vue_shared/issuable/show/components/issuable_edit_form_spec.js
@@ -124,7 +124,7 @@ describe('IssuableEditForm', () => {
const titleInputEl = wrapper.find('[data-testid="title"]');
expect(titleInputEl.exists()).toBe(true);
- expect(titleInputEl.find(GlFormInput).attributes()).toMatchObject({
+ expect(titleInputEl.findComponent(GlFormInput).attributes()).toMatchObject({
'aria-label': 'Title',
placeholder: 'Title',
});
@@ -134,7 +134,7 @@ describe('IssuableEditForm', () => {
const descriptionEl = wrapper.find('[data-testid="description"]');
expect(descriptionEl.exists()).toBe(true);
- expect(descriptionEl.find(MarkdownField).props()).toMatchObject({
+ expect(descriptionEl.findComponent(MarkdownField).props()).toMatchObject({
markdownPreviewPath: issuableEditFormProps.descriptionPreviewPath,
markdownDocsPath: issuableEditFormProps.descriptionHelpPath,
enableAutocomplete: issuableEditFormProps.enableAutocomplete,
@@ -161,7 +161,7 @@ describe('IssuableEditForm', () => {
};
it('component emits `keydown-title` event with event object and issuableMeta params via gl-form-input', async () => {
- const titleInputEl = wrapper.find(GlFormInput);
+ const titleInputEl = wrapper.findComponent(GlFormInput);
titleInputEl.vm.$emit('keydown', eventObj, 'title');
diff --git a/spec/frontend/vue_shared/issuable/show/components/issuable_header_spec.js b/spec/frontend/vue_shared/issuable/show/components/issuable_header_spec.js
index e00bb184535..6a8b9ef77a9 100644
--- a/spec/frontend/vue_shared/issuable/show/components/issuable_header_spec.js
+++ b/spec/frontend/vue_shared/issuable/show/components/issuable_header_spec.js
@@ -86,7 +86,7 @@ describe('IssuableHeader', () => {
const blockedEl = wrapper.findByTestId('blocked');
expect(blockedEl.exists()).toBe(true);
- expect(blockedEl.find(GlIcon).props('name')).toBe('lock');
+ expect(blockedEl.findComponent(GlIcon).props('name')).toBe('lock');
});
it('renders confidential icon when issuable is confidential', async () => {
@@ -97,7 +97,7 @@ describe('IssuableHeader', () => {
const confidentialEl = wrapper.findByTestId('confidential');
expect(confidentialEl.exists()).toBe(true);
- expect(confidentialEl.find(GlIcon).props('name')).toBe('eye-slash');
+ expect(confidentialEl.findComponent(GlIcon).props('name')).toBe('eye-slash');
});
it('renders issuable author avatar', () => {
@@ -113,19 +113,19 @@ describe('IssuableHeader', () => {
const avatarEl = wrapper.findByTestId('avatar');
expect(avatarEl.exists()).toBe(true);
expect(avatarEl.attributes()).toMatchObject(avatarElAttrs);
- expect(avatarEl.find(GlAvatarLabeled).attributes()).toMatchObject({
+ expect(avatarEl.findComponent(GlAvatarLabeled).attributes()).toMatchObject({
size: '24',
src: avatarUrl,
label: name,
});
- expect(avatarEl.find(GlAvatarLabeled).find(GlIcon).exists()).toBe(false);
+ expect(avatarEl.findComponent(GlAvatarLabeled).findComponent(GlIcon).exists()).toBe(false);
});
it('renders task status text when `taskCompletionStatus` prop is defined', () => {
createComponent();
expect(findTaskStatusEl().exists()).toBe(true);
- expect(findTaskStatusEl().text()).toContain('0 of 5 tasks completed');
+ expect(findTaskStatusEl().text()).toContain('0 of 5 checklist items completed');
});
it('does not render task status text when tasks count is 0', () => {
@@ -172,7 +172,7 @@ describe('IssuableHeader', () => {
);
const avatarEl = wrapper.findComponent(GlAvatarLabeled);
- const icon = avatarEl.find(GlIcon);
+ const icon = avatarEl.findComponent(GlIcon);
expect(icon.exists()).toBe(true);
expect(icon.props('name')).toBe('external-link');
diff --git a/spec/frontend/vue_shared/issuable/show/components/issuable_show_root_spec.js b/spec/frontend/vue_shared/issuable/show/components/issuable_show_root_spec.js
index f56064ed8e1..edfd55c8bb4 100644
--- a/spec/frontend/vue_shared/issuable/show/components/issuable_show_root_spec.js
+++ b/spec/frontend/vue_shared/issuable/show/components/issuable_show_root_spec.js
@@ -63,7 +63,7 @@ describe('IssuableShowRoot', () => {
});
it('renders issuable-header component', () => {
- const issuableHeader = wrapper.find(IssuableHeader);
+ const issuableHeader = wrapper.findComponent(IssuableHeader);
expect(issuableHeader.exists()).toBe(true);
expect(issuableHeader.props()).toMatchObject({
@@ -84,7 +84,7 @@ describe('IssuableShowRoot', () => {
});
it('renders issuable-body component', () => {
- const issuableBody = wrapper.find(IssuableBody);
+ const issuableBody = wrapper.findComponent(IssuableBody);
expect(issuableBody.exists()).toBe(true);
expect(issuableBody.props()).toMatchObject({
@@ -99,38 +99,38 @@ describe('IssuableShowRoot', () => {
});
it('renders issuable-sidebar component', () => {
- const issuableSidebar = wrapper.find(IssuableSidebar);
+ const issuableSidebar = wrapper.findComponent(IssuableSidebar);
expect(issuableSidebar.exists()).toBe(true);
});
describe('events', () => {
it('component emits `edit-issuable` event bubbled via issuable-body', () => {
- const issuableBody = wrapper.find(IssuableBody);
+ const issuableBody = wrapper.findComponent(IssuableBody);
issuableBody.vm.$emit('edit-issuable');
- expect(wrapper.emitted('edit-issuable')).toBeTruthy();
+ expect(wrapper.emitted('edit-issuable')).toHaveLength(1);
});
it('component emits `task-list-update-success` event bubbled via issuable-body', () => {
- const issuableBody = wrapper.find(IssuableBody);
+ const issuableBody = wrapper.findComponent(IssuableBody);
const eventParam = {
foo: 'bar',
};
issuableBody.vm.$emit('task-list-update-success', eventParam);
- expect(wrapper.emitted('task-list-update-success')).toBeTruthy();
+ expect(wrapper.emitted('task-list-update-success')).toHaveLength(1);
expect(wrapper.emitted('task-list-update-success')[0]).toEqual([eventParam]);
});
it('component emits `task-list-update-failure` event bubbled via issuable-body', () => {
- const issuableBody = wrapper.find(IssuableBody);
+ const issuableBody = wrapper.findComponent(IssuableBody);
issuableBody.vm.$emit('task-list-update-failure');
- expect(wrapper.emitted('task-list-update-failure')).toBeTruthy();
+ expect(wrapper.emitted('task-list-update-failure')).toHaveLength(1);
});
it.each(['keydown-title', 'keydown-description'])(
@@ -145,11 +145,11 @@ describe('IssuableShowRoot', () => {
issuableDescription: 'foobar',
};
- const issuableBody = wrapper.find(IssuableBody);
+ const issuableBody = wrapper.findComponent(IssuableBody);
issuableBody.vm.$emit(eventName, eventObj, issuableMeta);
- expect(wrapper.emitted(eventName)).toBeTruthy();
+ expect(wrapper.emitted()).toHaveProperty(eventName);
expect(wrapper.emitted(eventName)[0]).toMatchObject([eventObj, issuableMeta]);
},
);
diff --git a/spec/frontend/vue_shared/security_reports/components/security_report_download_dropdown_spec.js b/spec/frontend/vue_shared/security_reports/components/security_report_download_dropdown_spec.js
index 4b75da0b126..5f2b13a79c9 100644
--- a/spec/frontend/vue_shared/security_reports/components/security_report_download_dropdown_spec.js
+++ b/spec/frontend/vue_shared/security_reports/components/security_report_download_dropdown_spec.js
@@ -12,8 +12,8 @@ describe('SecurityReportDownloadDropdown component', () => {
});
};
- const findDropdown = () => wrapper.find(GlDropdown);
- const findDropdownItems = () => wrapper.findAll(GlDropdownItem);
+ const findDropdown = () => wrapper.findComponent(GlDropdown);
+ const findDropdownItems = () => wrapper.findAllComponents(GlDropdownItem);
afterEach(() => {
wrapper.destroy();
diff --git a/spec/frontend/vue_shared/security_reports/security_reports_app_spec.js b/spec/frontend/vue_shared/security_reports/security_reports_app_spec.js
index 68a97103d3a..a9651cf8bac 100644
--- a/spec/frontend/vue_shared/security_reports/security_reports_app_spec.js
+++ b/spec/frontend/vue_shared/security_reports/security_reports_app_spec.js
@@ -70,8 +70,8 @@ describe('Security reports app', () => {
return createMockApollo(requestHandlers);
};
- const findDownloadDropdown = () => wrapper.find(SecurityReportDownloadDropdown);
- const findHelpIconComponent = () => wrapper.find(HelpIcon);
+ const findDownloadDropdown = () => wrapper.findComponent(SecurityReportDownloadDropdown);
+ const findHelpIconComponent = () => wrapper.findComponent(HelpIcon);
afterEach(() => {
wrapper.destroy();
diff --git a/spec/frontend/whats_new/components/app_spec.js b/spec/frontend/whats_new/components/app_spec.js
index 945727cd664..de5a814d3e7 100644
--- a/spec/frontend/whats_new/components/app_spec.js
+++ b/spec/frontend/whats_new/components/app_spec.js
@@ -64,7 +64,7 @@ describe('App', () => {
buildWrapper();
wrapper.vm.$store.state.features = [
- { title: 'Whats New Drawer', url: 'www.url.com', release: 3.11 },
+ { name: 'Whats New Drawer', documentation_link: 'www.url.com', release: 3.11 },
];
wrapper.vm.$store.state.drawerBodyHeight = MOCK_DRAWER_BODY_HEIGHT;
await nextTick();
@@ -115,7 +115,7 @@ describe('App', () => {
it('renders features when provided via ajax', () => {
expect(actions.fetchItems).toHaveBeenCalled();
- expect(wrapper.find('[data-test-id="feature-title"]').text()).toBe('Whats New Drawer');
+ expect(wrapper.find('[data-test-id="feature-name"]').text()).toBe('Whats New Drawer');
});
it('send an event when feature item is clicked', () => {
diff --git a/spec/frontend/whats_new/components/feature_spec.js b/spec/frontend/whats_new/components/feature_spec.js
index b6627c257ff..099054bf8ca 100644
--- a/spec/frontend/whats_new/components/feature_spec.js
+++ b/spec/frontend/whats_new/components/feature_spec.js
@@ -6,14 +6,15 @@ describe("What's new single feature", () => {
let wrapper;
const exampleFeature = {
- title: 'Compliance pipeline configurations',
- body:
+ name: 'Compliance pipeline configurations',
+ description:
'<p data-testid="body-content">We are thrilled to announce that it is now possible to define enforceable pipelines that will run for any project assigned a corresponding <a href="https://en.wikipedia.org/wiki/Compliance_(psychology)" target="_blank" rel="noopener noreferrer" onload="alert(xss)">compliance</a> framework.</p>',
stage: 'Manage',
'self-managed': true,
'gitlab-com': true,
- packages: ['Ultimate'],
- url: 'https://docs.gitlab.com/ee/user/project/settings/#compliance-pipeline-configuration',
+ available_in: ['Ultimate'],
+ documentation_link:
+ 'https://docs.gitlab.com/ee/user/project/settings/#compliance-pipeline-configuration',
image_url: 'https://img.youtube.com/vi/upLJ_equomw/hqdefault.jpg',
published_at: '2021-04-22T00:00:00.000Z',
release: '13.11',
diff --git a/spec/frontend/work_items/components/item_state_spec.js b/spec/frontend/work_items/components/item_state_spec.js
index 79b76f3c061..c3cc2fbc556 100644
--- a/spec/frontend/work_items/components/item_state_spec.js
+++ b/spec/frontend/work_items/components/item_state_spec.js
@@ -1,3 +1,4 @@
+import { GlFormSelect } from '@gitlab/ui';
import { mount } from '@vue/test-utils';
import { STATE_OPEN, STATE_CLOSED } from '~/work_items/constants';
import ItemState from '~/work_items/components/item_state.vue';
@@ -6,6 +7,7 @@ describe('ItemState', () => {
let wrapper;
const findLabel = () => wrapper.find('label').text();
+ const findFormSelect = () => wrapper.findComponent(GlFormSelect);
const selectedValue = () => wrapper.find('option:checked').element.value;
const clickOpen = () => wrapper.findAll('option').at(0).setSelected();
@@ -51,4 +53,18 @@ describe('ItemState', () => {
expect(wrapper.emitted('changed')).toBeUndefined();
});
+
+ describe('form select disabled prop', () => {
+ describe.each`
+ description | disabled | value
+ ${'when not disabled'} | ${false} | ${undefined}
+ ${'when disabled'} | ${true} | ${'disabled'}
+ `('$description', ({ disabled, value }) => {
+ it(`renders form select component with disabled=${value}`, () => {
+ createComponent({ disabled });
+
+ expect(findFormSelect().attributes('disabled')).toBe(value);
+ });
+ });
+ });
});
diff --git a/spec/frontend/work_items/components/item_title_spec.js b/spec/frontend/work_items/components/item_title_spec.js
index a55f448c9a2..de20369eb1b 100644
--- a/spec/frontend/work_items/components/item_title_spec.js
+++ b/spec/frontend/work_items/components/item_title_spec.js
@@ -37,7 +37,7 @@ describe('ItemTitle', () => {
disabled: true,
});
- expect(wrapper.classes()).toContain('gl-cursor-not-allowed');
+ expect(wrapper.classes()).toContain('gl-cursor-text');
expect(findInputEl().attributes('contenteditable')).toBe('false');
});
diff --git a/spec/frontend/work_items/components/work_item_actions_spec.js b/spec/frontend/work_items/components/work_item_actions_spec.js
index 137a0a7326d..a1f1d47ab90 100644
--- a/spec/frontend/work_items/components/work_item_actions_spec.js
+++ b/spec/frontend/work_items/components/work_item_actions_spec.js
@@ -1,5 +1,5 @@
-import { GlDropdownItem, GlModal } from '@gitlab/ui';
-import { shallowMount } from '@vue/test-utils';
+import { GlModal } from '@gitlab/ui';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import WorkItemActions from '~/work_items/components/work_item_actions.vue';
describe('WorkItemActions component', () => {
@@ -7,12 +7,19 @@ describe('WorkItemActions component', () => {
let glModalDirective;
const findModal = () => wrapper.findComponent(GlModal);
- const findDeleteButton = () => wrapper.findComponent(GlDropdownItem);
+ const findConfidentialityToggleButton = () =>
+ wrapper.findByTestId('confidentiality-toggle-action');
+ const findDeleteButton = () => wrapper.findByTestId('delete-action');
- const createComponent = ({ canDelete = true } = {}) => {
+ const createComponent = ({
+ canUpdate = true,
+ canDelete = true,
+ isConfidential = false,
+ isParentConfidential = false,
+ } = {}) => {
glModalDirective = jest.fn();
- wrapper = shallowMount(WorkItemActions, {
- propsData: { workItemId: '123', canDelete },
+ wrapper = shallowMountExtended(WorkItemActions, {
+ propsData: { workItemId: '123', canUpdate, canDelete, isConfidential, isParentConfidential },
directives: {
glModal: {
bind(_, { value }) {
@@ -34,27 +41,69 @@ describe('WorkItemActions component', () => {
expect(findModal().props('visible')).toBe(false);
});
- it('shows confirm modal when clicking Delete work item', () => {
+ it('renders dropdown actions', () => {
createComponent();
- findDeleteButton().vm.$emit('click');
-
- expect(glModalDirective).toHaveBeenCalled();
+ expect(findConfidentialityToggleButton().exists()).toBe(true);
+ expect(findDeleteButton().exists()).toBe(true);
});
- it('emits event when clicking OK button', () => {
- createComponent();
+ describe('toggle confidentiality action', () => {
+ it.each`
+ isConfidential | buttonText
+ ${true} | ${'Turn off confidentiality'}
+ ${false} | ${'Turn on confidentiality'}
+ `(
+ 'renders confidentiality toggle button with text "$buttonText"',
+ ({ isConfidential, buttonText }) => {
+ createComponent({ isConfidential });
+
+ expect(findConfidentialityToggleButton().text()).toBe(buttonText);
+ },
+ );
+
+ it('emits `toggleWorkItemConfidentiality` event when clicked', () => {
+ createComponent();
- findModal().vm.$emit('ok');
+ findConfidentialityToggleButton().vm.$emit('click');
- expect(wrapper.emitted('deleteWorkItem')).toEqual([[]]);
+ expect(wrapper.emitted('toggleWorkItemConfidentiality')[0]).toEqual([true]);
+ });
+
+ it.each`
+ props | propName | value
+ ${{ isParentConfidential: true }} | ${'isParentConfidential'} | ${true}
+ ${{ canUpdate: false }} | ${'canUpdate'} | ${false}
+ `('does not render when $propName is $value', ({ props }) => {
+ createComponent(props);
+
+ expect(findConfidentialityToggleButton().exists()).toBe(false);
+ });
});
- it('does not render when canDelete is false', () => {
- createComponent({
- canDelete: false,
+ describe('delete action', () => {
+ it('shows confirm modal when clicked', () => {
+ createComponent();
+
+ findDeleteButton().vm.$emit('click');
+
+ expect(glModalDirective).toHaveBeenCalled();
+ });
+
+ it('emits event when clicking OK button', () => {
+ createComponent();
+
+ findModal().vm.$emit('ok');
+
+ expect(wrapper.emitted('deleteWorkItem')).toEqual([[]]);
});
- expect(wrapper.html()).toBe('');
+ it('does not render when canDelete is false', () => {
+ createComponent({
+ canDelete: false,
+ });
+
+ expect(wrapper.findByTestId('delete-action').exists()).toBe(false);
+ });
});
});
diff --git a/spec/frontend/work_items/components/work_item_assignees_spec.js b/spec/frontend/work_items/components/work_item_assignees_spec.js
index 299949a4baa..f0ef8aee7a9 100644
--- a/spec/frontend/work_items/components/work_item_assignees_spec.js
+++ b/spec/frontend/work_items/components/work_item_assignees_spec.js
@@ -5,14 +5,15 @@ import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
import { mountExtended } from 'helpers/vue_test_utils_helper';
import { mockTracking } from 'helpers/tracking_helper';
-import { stripTypenames } from 'helpers/graphql_helpers';
import { DEFAULT_DEBOUNCE_AND_THROTTLE_MS } from '~/lib/utils/constants';
import userSearchQuery from '~/graphql_shared/queries/users_search.query.graphql';
import currentUserQuery from '~/graphql_shared/queries/current_user.query.graphql';
+import InviteMembersTrigger from '~/invite_members/components/invite_members_trigger.vue';
import workItemQuery from '~/work_items/graphql/work_item.query.graphql';
+import updateWorkItemMutation from '~/work_items/graphql/update_work_item.mutation.graphql';
import WorkItemAssignees from '~/work_items/components/work_item_assignees.vue';
import { i18n, TASK_TYPE_NAME, TRACKING_CATEGORY_SHOW } from '~/work_items/constants';
-import { temporaryConfig, resolvers } from '~/work_items/graphql/provider';
+import { temporaryConfig } from '~/work_items/graphql/provider';
import {
projectMembersResponseWithCurrentUser,
mockAssignees,
@@ -20,6 +21,7 @@ import {
currentUserResponse,
currentUserNullResponse,
projectMembersResponseWithoutCurrentUser,
+ updateWorkItemMutationResponse,
} from '../mock_data';
Vue.use(VueApollo);
@@ -33,6 +35,7 @@ describe('WorkItemAssignees component', () => {
const findAssigneeLinks = () => wrapper.findAllComponents(GlLink);
const findTokenSelector = () => wrapper.findComponent(GlTokenSelector);
const findSkeletonLoader = () => wrapper.findComponent(GlSkeletonLoader);
+ const findInviteMembersTrigger = () => wrapper.findComponent(InviteMembersTrigger);
const findEmptyState = () => wrapper.findByTestId('empty-state');
const findAssignSelfButton = () => wrapper.findByTestId('assign-self');
@@ -43,6 +46,9 @@ describe('WorkItemAssignees component', () => {
.mockResolvedValue(projectMembersResponseWithCurrentUser);
const successCurrentUserQueryHandler = jest.fn().mockResolvedValue(currentUserResponse);
const noCurrentUserQueryHandler = jest.fn().mockResolvedValue(currentUserNullResponse);
+ const successUpdateWorkItemMutationHandler = jest
+ .fn()
+ .mockResolvedValue(updateWorkItemMutationResponse);
const errorHandler = jest.fn().mockRejectedValue('Houston, we have a problem');
@@ -50,15 +56,18 @@ describe('WorkItemAssignees component', () => {
assignees = mockAssignees,
searchQueryHandler = successSearchQueryHandler,
currentUserQueryHandler = successCurrentUserQueryHandler,
+ updateWorkItemMutationHandler = successUpdateWorkItemMutationHandler,
allowsMultipleAssignees = true,
+ canInviteMembers = false,
canUpdate = true,
} = {}) => {
const apolloProvider = createMockApollo(
[
[userSearchQuery, searchQueryHandler],
[currentUserQuery, currentUserQueryHandler],
+ [updateWorkItemMutation, updateWorkItemMutationHandler],
],
- resolvers,
+ {},
{
typePolicies: temporaryConfig.cacheConfig.typePolicies,
},
@@ -82,6 +91,7 @@ describe('WorkItemAssignees component', () => {
allowsMultipleAssignees,
workItemType: TASK_TYPE_NAME,
canUpdate,
+ canInviteMembers,
},
attachTo: document.body,
apolloProvider,
@@ -120,15 +130,6 @@ describe('WorkItemAssignees component', () => {
expect(findTokenSelector().element.contains(document.activeElement)).toBe(true);
});
- it('calls a mutation on clicking outside the token selector', async () => {
- createComponent();
- findTokenSelector().vm.$emit('input', [mockAssignees[0]]);
- findTokenSelector().vm.$emit('blur', new FocusEvent({ relatedTarget: null }));
- await waitForPromises();
-
- expect(findTokenSelector().props('selectedTokens')).toEqual([mockAssignees[0]]);
- });
-
it('passes `false` to `viewOnly` token selector prop if user can update assignees', () => {
createComponent();
@@ -141,6 +142,36 @@ describe('WorkItemAssignees component', () => {
expect(findTokenSelector().props('viewOnly')).toBe(true);
});
+ describe('when clicking outside the token selector', () => {
+ function arrange(args) {
+ createComponent(args);
+ findTokenSelector().vm.$emit('input', [mockAssignees[0]]);
+ findTokenSelector().vm.$emit('blur', new FocusEvent({ relatedTarget: null }));
+ }
+
+ it('calls a mutation with correct variables', () => {
+ arrange({ assignees: [] });
+
+ expect(successUpdateWorkItemMutationHandler).toHaveBeenCalledWith({
+ input: {
+ assigneesWidget: { assigneeIds: [mockAssignees[0].id] },
+ id: 'gid://gitlab/WorkItem/1',
+ },
+ });
+ });
+
+ it('emits an error and resets assignees if mutation was rejected', async () => {
+ arrange({ updateWorkItemMutationHandler: errorHandler, assignees: [mockAssignees[1]] });
+
+ await waitForPromises();
+
+ expect(wrapper.emitted('error')).toEqual([[i18n.updateError]]);
+ expect(findTokenSelector().props('selectedTokens')).toEqual([
+ { ...mockAssignees[1], class: expect.anything() },
+ ]);
+ });
+ });
+
describe('when searching for users', () => {
beforeEach(() => {
createComponent();
@@ -204,7 +235,7 @@ describe('WorkItemAssignees component', () => {
expect(findTokenSelector().props('dropdownItems')).toHaveLength(2);
});
- it('should search for users with correct key after text input', async () => {
+ it('searches for users with correct key after text input', async () => {
const searchKey = 'Hello';
findTokenSelector().vm.$emit('focus');
@@ -225,6 +256,18 @@ describe('WorkItemAssignees component', () => {
expect(wrapper.emitted('error')).toEqual([[i18n.fetchError]]);
});
+ it('updates localAssignees when assignees prop is updated', async () => {
+ createComponent({ assignees: [] });
+
+ expect(findTokenSelector().props('selectedTokens')).toEqual([]);
+
+ await wrapper.setProps({ assignees: [mockAssignees[0]] });
+
+ expect(findTokenSelector().props('selectedTokens')).toEqual([
+ { ...mockAssignees[0], class: expect.anything() },
+ ]);
+ });
+
describe('when assigning to current user', () => {
it('does not show `Assign myself` button if current user is loading', () => {
createComponent();
@@ -261,23 +304,21 @@ describe('WorkItemAssignees component', () => {
expect(findAssignSelfButton().exists()).toBe(true);
});
- it('calls update work item assignees mutation with current user as a variable on button click', () => {
- // TODO: replace this test as soon as we have a real mutation implemented
- jest.spyOn(wrapper.vm.$apollo, 'mutate').mockImplementation(jest.fn());
-
+ it('calls update work item assignees mutation with current user as a variable on button click', async () => {
+ const { currentUser } = currentUserResponse.data;
findTokenSelector().trigger('mouseover');
findAssignSelfButton().vm.$emit('click', new MouseEvent('click'));
+ await nextTick();
- expect(wrapper.vm.$apollo.mutate).toHaveBeenCalledWith(
- expect.objectContaining({
- variables: {
- input: {
- assignees: [stripTypenames(currentUserResponse.data.currentUser)],
- id: workItemId,
- },
+ expect(findTokenSelector().props('selectedTokens')).toMatchObject([currentUser]);
+ expect(successUpdateWorkItemMutationHandler).toHaveBeenCalledWith({
+ input: {
+ id: workItemId,
+ assigneesWidget: {
+ assigneeIds: [currentUser.id],
},
- }),
- );
+ },
+ });
});
});
@@ -286,9 +327,7 @@ describe('WorkItemAssignees component', () => {
await waitForPromises();
expect(findTokenSelector().props('dropdownItems')[0]).toEqual(
- expect.objectContaining({
- ...stripTypenames(currentUserResponse.data.currentUser),
- }),
+ expect.objectContaining(currentUserResponse.data.currentUser),
);
});
@@ -303,9 +342,10 @@ describe('WorkItemAssignees component', () => {
});
it('adds current user to the top of dropdown items', () => {
- expect(findTokenSelector().props('dropdownItems')[0]).toEqual(
- stripTypenames(currentUserResponse.data.currentUser),
- );
+ expect(findTokenSelector().props('dropdownItems')[0]).toEqual({
+ ...currentUserResponse.data.currentUser,
+ class: expect.anything(),
+ });
});
it('does not add current user if search is not empty', async () => {
@@ -313,7 +353,7 @@ describe('WorkItemAssignees component', () => {
await waitForPromises();
expect(findTokenSelector().props('dropdownItems')[0]).not.toEqual(
- stripTypenames(currentUserResponse.data.currentUser),
+ currentUserResponse.data.currentUser,
);
});
});
@@ -405,4 +445,18 @@ describe('WorkItemAssignees component', () => {
});
});
});
+
+ describe('invite members', () => {
+ it('does not render `Invite members` link if user has no permission to invite members', () => {
+ createComponent();
+
+ expect(findInviteMembersTrigger().exists()).toBe(false);
+ });
+
+ it('renders `Invite members` link if user has permission to invite members', () => {
+ createComponent({ canInviteMembers: true });
+
+ expect(findInviteMembersTrigger().exists()).toBe(true);
+ });
+ });
});
diff --git a/spec/frontend/work_items/components/work_item_detail_modal_spec.js b/spec/frontend/work_items/components/work_item_detail_modal_spec.js
index 70b1261bdb7..01891012f99 100644
--- a/spec/frontend/work_items/components/work_item_detail_modal_spec.js
+++ b/spec/frontend/work_items/components/work_item_detail_modal_spec.js
@@ -7,6 +7,13 @@ import createMockApollo from 'helpers/mock_apollo_helper';
import WorkItemDetail from '~/work_items/components/work_item_detail.vue';
import WorkItemDetailModal from '~/work_items/components/work_item_detail_modal.vue';
import deleteWorkItemFromTaskMutation from '~/work_items/graphql/delete_task_from_work_item.mutation.graphql';
+import deleteWorkItemMutation from '~/work_items/graphql/delete_work_item.mutation.graphql';
+import {
+ deleteWorkItemFromTaskMutationErrorResponse,
+ deleteWorkItemFromTaskMutationResponse,
+ deleteWorkItemMutationErrorResponse,
+ deleteWorkItemResponse,
+} from '../mock_data';
describe('WorkItemDetailModal component', () => {
let wrapper;
@@ -25,28 +32,38 @@ describe('WorkItemDetailModal component', () => {
},
};
+ const defaultPropsData = {
+ issueGid: 'gid://gitlab/WorkItem/1',
+ workItemId: 'gid://gitlab/WorkItem/2',
+ };
+
const findModal = () => wrapper.findComponent(GlModal);
const findAlert = () => wrapper.findComponent(GlAlert);
const findWorkItemDetail = () => wrapper.findComponent(WorkItemDetail);
- const createComponent = ({ workItemId = '1', issueGid = '2', error = false } = {}) => {
+ const createComponent = ({
+ lockVersion,
+ lineNumberStart,
+ lineNumberEnd,
+ error = false,
+ deleteWorkItemFromTaskMutationHandler = jest
+ .fn()
+ .mockResolvedValue(deleteWorkItemFromTaskMutationResponse),
+ deleteWorkItemMutationHandler = jest.fn().mockResolvedValue(deleteWorkItemResponse),
+ } = {}) => {
const apolloProvider = createMockApollo([
- [
- deleteWorkItemFromTaskMutation,
- jest.fn().mockResolvedValue({
- data: {
- workItemDeleteTask: {
- workItem: { id: 123, descriptionHtml: 'updated work item desc' },
- errors: [],
- },
- },
- }),
- ],
+ [deleteWorkItemFromTaskMutation, deleteWorkItemFromTaskMutationHandler],
+ [deleteWorkItemMutation, deleteWorkItemMutationHandler],
]);
wrapper = shallowMount(WorkItemDetailModal, {
apolloProvider,
- propsData: { workItemId, issueGid },
+ propsData: {
+ ...defaultPropsData,
+ lockVersion,
+ lineNumberStart,
+ lineNumberEnd,
+ },
data() {
return {
error,
@@ -67,8 +84,8 @@ describe('WorkItemDetailModal component', () => {
expect(findWorkItemDetail().props()).toEqual({
isModal: true,
- workItemId: '1',
- workItemParentId: '2',
+ workItemId: defaultPropsData.workItemId,
+ workItemParentId: defaultPropsData.issueGid,
});
});
@@ -109,16 +126,85 @@ describe('WorkItemDetailModal component', () => {
});
describe('delete work item', () => {
- it('emits workItemDeleted and closes modal', async () => {
- createComponent();
- const newDesc = 'updated work item desc';
-
- findWorkItemDetail().vm.$emit('deleteWorkItem');
-
- await waitForPromises();
+ describe('when there is task data', () => {
+ it('emits workItemDeleted and closes modal', async () => {
+ const mutationMock = jest.fn().mockResolvedValue(deleteWorkItemFromTaskMutationResponse);
+ createComponent({
+ lockVersion: 1,
+ lineNumberStart: '3',
+ lineNumberEnd: '3',
+ deleteWorkItemFromTaskMutationHandler: mutationMock,
+ });
+ const newDesc = 'updated work item desc';
+
+ findWorkItemDetail().vm.$emit('deleteWorkItem');
+ await waitForPromises();
+
+ expect(wrapper.emitted('workItemDeleted')).toEqual([[newDesc]]);
+ expect(hideModal).toHaveBeenCalled();
+ expect(mutationMock).toHaveBeenCalledWith({
+ input: {
+ id: defaultPropsData.issueGid,
+ lockVersion: 1,
+ taskData: { id: defaultPropsData.workItemId, lineNumberEnd: 3, lineNumberStart: 3 },
+ },
+ });
+ });
+
+ it.each`
+ errorType | mutationMock | errorMessage
+ ${'an error in the mutation response'} | ${jest.fn().mockResolvedValue(deleteWorkItemFromTaskMutationErrorResponse)} | ${'Error'}
+ ${'a network error'} | ${jest.fn().mockRejectedValue(new Error('GraphQL networkError'))} | ${'GraphQL networkError'}
+ `(
+ 'shows an error message when there is $errorType',
+ async ({ mutationMock, errorMessage }) => {
+ createComponent({
+ lockVersion: 1,
+ lineNumberStart: '3',
+ lineNumberEnd: '3',
+ deleteWorkItemFromTaskMutationHandler: mutationMock,
+ });
+
+ findWorkItemDetail().vm.$emit('deleteWorkItem');
+ await waitForPromises();
+
+ expect(wrapper.emitted('workItemDeleted')).toBeUndefined();
+ expect(hideModal).not.toHaveBeenCalled();
+ expect(findAlert().text()).toBe(errorMessage);
+ },
+ );
+ });
- expect(wrapper.emitted('workItemDeleted')).toEqual([[newDesc]]);
- expect(hideModal).toHaveBeenCalled();
+ describe('when there is no task data', () => {
+ it('emits workItemDeleted and closes modal', async () => {
+ const mutationMock = jest.fn().mockResolvedValue(deleteWorkItemResponse);
+ createComponent({ deleteWorkItemMutationHandler: mutationMock });
+
+ findWorkItemDetail().vm.$emit('deleteWorkItem');
+ await waitForPromises();
+
+ expect(wrapper.emitted('workItemDeleted')).toEqual([[defaultPropsData.workItemId]]);
+ expect(hideModal).toHaveBeenCalled();
+ expect(mutationMock).toHaveBeenCalledWith({ input: { id: defaultPropsData.workItemId } });
+ });
+
+ it.each`
+ errorType | mutationMock | errorMessage
+ ${'an error in the mutation response'} | ${jest.fn().mockResolvedValue(deleteWorkItemMutationErrorResponse)} | ${'Error'}
+ ${'a network error'} | ${jest.fn().mockRejectedValue(new Error('GraphQL networkError'))} | ${'GraphQL networkError'}
+ `(
+ 'shows an error message when there is $errorType',
+ async ({ mutationMock, errorMessage }) => {
+ createComponent({ deleteWorkItemMutationHandler: mutationMock });
+
+ findWorkItemDetail().vm.$emit('deleteWorkItem');
+ await waitForPromises();
+
+ expect(wrapper.emitted('workItemDeleted')).toBeUndefined();
+ expect(hideModal).not.toHaveBeenCalled();
+ expect(findAlert().text()).toBe(errorMessage);
+ },
+ );
});
});
});
diff --git a/spec/frontend/work_items/components/work_item_links/work_item_links_form_spec.js b/spec/frontend/work_items/components/work_item_links/work_item_links_form_spec.js
index 93bf7286aa7..434c1db8a2c 100644
--- a/spec/frontend/work_items/components/work_item_links/work_item_links_form_spec.js
+++ b/spec/frontend/work_items/components/work_item_links/work_item_links_form_spec.js
@@ -1,13 +1,20 @@
import Vue from 'vue';
-import { GlForm, GlFormCombobox } from '@gitlab/ui';
+import { GlForm, GlFormInput, GlFormCombobox } from '@gitlab/ui';
import VueApollo from 'vue-apollo';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
import WorkItemLinksForm from '~/work_items/components/work_item_links/work_item_links_form.vue';
import projectWorkItemsQuery from '~/work_items/graphql/project_work_items.query.graphql';
+import projectWorkItemTypesQuery from '~/work_items/graphql/project_work_item_types.query.graphql';
+import createWorkItemMutation from '~/work_items/graphql/create_work_item.mutation.graphql';
import updateWorkItemMutation from '~/work_items/graphql/update_work_item.mutation.graphql';
-import { availableWorkItemsResponse, updateWorkItemMutationResponse } from '../../mock_data';
+import {
+ availableWorkItemsResponse,
+ projectWorkItemTypesQueryResponse,
+ createWorkItemMutationResponse,
+ updateWorkItemMutationResponse,
+} from '../../mock_data';
Vue.use(VueApollo);
@@ -15,14 +22,21 @@ describe('WorkItemLinksForm', () => {
let wrapper;
const updateMutationResolver = jest.fn().mockResolvedValue(updateWorkItemMutationResponse);
+ const createMutationResolver = jest.fn().mockResolvedValue(createWorkItemMutationResponse);
- const createComponent = async ({ listResponse = availableWorkItemsResponse } = {}) => {
+ const createComponent = async ({
+ listResponse = availableWorkItemsResponse,
+ typesResponse = projectWorkItemTypesQueryResponse,
+ parentConfidential = false,
+ } = {}) => {
wrapper = shallowMountExtended(WorkItemLinksForm, {
apolloProvider: createMockApollo([
[projectWorkItemsQuery, jest.fn().mockResolvedValue(listResponse)],
+ [projectWorkItemTypesQuery, jest.fn().mockResolvedValue(typesResponse)],
[updateWorkItemMutation, updateMutationResolver],
+ [createWorkItemMutation, createMutationResolver],
]),
- propsData: { issuableGid: 'gid://gitlab/WorkItem/1' },
+ propsData: { issuableGid: 'gid://gitlab/WorkItem/1', parentConfidential },
provide: {
projectPath: 'project/path',
},
@@ -33,6 +47,7 @@ describe('WorkItemLinksForm', () => {
const findForm = () => wrapper.findComponent(GlForm);
const findCombobox = () => wrapper.findComponent(GlFormCombobox);
+ const findInput = () => wrapper.findComponent(GlFormInput);
const findAddChildButton = () => wrapper.findByTestId('add-child-button');
beforeEach(async () => {
@@ -47,19 +62,73 @@ describe('WorkItemLinksForm', () => {
expect(findForm().exists()).toBe(true);
});
- it('passes available work items as prop when typing in combobox', async () => {
- findCombobox().vm.$emit('input', 'Task');
+ it('creates child task in non-confidential parent', async () => {
+ findInput().vm.$emit('input', 'Create task test');
+
+ findForm().vm.$emit('submit', {
+ preventDefault: jest.fn(),
+ });
await waitForPromises();
+ expect(createMutationResolver).toHaveBeenCalledWith({
+ input: {
+ title: 'Create task test',
+ projectPath: 'project/path',
+ workItemTypeId: 'gid://gitlab/WorkItems::Type/3',
+ hierarchyWidget: {
+ parentId: 'gid://gitlab/WorkItem/1',
+ },
+ confidential: false,
+ },
+ });
+ });
+
+ it('creates child task in confidential parent', async () => {
+ await createComponent({ parentConfidential: true });
+
+ findInput().vm.$emit('input', 'Create confidential task');
- expect(findCombobox().exists()).toBe(true);
- expect(findCombobox().props('tokenList').length).toBe(2);
+ findForm().vm.$emit('submit', {
+ preventDefault: jest.fn(),
+ });
+ await waitForPromises();
+ expect(createMutationResolver).toHaveBeenCalledWith({
+ input: {
+ title: 'Create confidential task',
+ projectPath: 'project/path',
+ workItemTypeId: 'gid://gitlab/WorkItems::Type/3',
+ hierarchyWidget: {
+ parentId: 'gid://gitlab/WorkItem/1',
+ },
+ confidential: true,
+ },
+ });
});
- it('selects and add child', async () => {
+ // Follow-up issue to turn this functionality back on: https://gitlab.com/gitlab-org/gitlab/-/issues/368757
+ // eslint-disable-next-line jest/no-disabled-tests
+ it.skip('selects and adds child', async () => {
findCombobox().vm.$emit('input', availableWorkItemsResponse.data.workspace.workItems.edges[0]);
findAddChildButton().vm.$emit('click');
await waitForPromises();
expect(updateMutationResolver).toHaveBeenCalled();
});
+
+ // eslint-disable-next-line jest/no-disabled-tests
+ describe.skip('when typing in combobox', () => {
+ beforeEach(async () => {
+ findCombobox().vm.$emit('input', 'Task');
+ await waitForPromises();
+ await jest.runOnlyPendingTimers();
+ });
+
+ it('passes available work items as prop', () => {
+ expect(findCombobox().exists()).toBe(true);
+ expect(findCombobox().props('tokenList').length).toBe(2);
+ });
+
+ it('passes action to create task', () => {
+ expect(findCombobox().props('actionList').length).toBe(1);
+ });
+ });
});
diff --git a/spec/frontend/work_items/components/work_item_links/work_item_links_menu_spec.js b/spec/frontend/work_items/components/work_item_links/work_item_links_menu_spec.js
index f8471b7f167..287ec022d3f 100644
--- a/spec/frontend/work_items/components/work_item_links/work_item_links_menu_spec.js
+++ b/spec/frontend/work_items/components/work_item_links/work_item_links_menu_spec.js
@@ -1,75 +1,24 @@
-import Vue from 'vue';
import { GlDropdown, GlDropdownItem } from '@gitlab/ui';
-import { cloneDeep } from 'lodash';
-import VueApollo from 'vue-apollo';
-import createMockApollo from 'helpers/mock_apollo_helper';
+
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
-import waitForPromises from 'helpers/wait_for_promises';
import WorkItemLinksMenu from '~/work_items/components/work_item_links/work_item_links_menu.vue';
-import changeWorkItemParentMutation from '~/work_items/graphql/change_work_item_parent_link.mutation.graphql';
-import getWorkItemLinksQuery from '~/work_items/graphql/work_item_links.query.graphql';
-import { WIDGET_TYPE_HIERARCHY } from '~/work_items/constants';
-import { workItemHierarchyResponse, changeWorkItemParentMutationResponse } from '../../mock_data';
-
-Vue.use(VueApollo);
-
-const PARENT_ID = 'gid://gitlab/WorkItem/1';
-const WORK_ITEM_ID = 'gid://gitlab/WorkItem/3';
describe('WorkItemLinksMenu', () => {
let wrapper;
- let mockApollo;
-
- const $toast = {
- show: jest.fn(),
- };
-
- const createComponent = async ({
- data = {},
- mutationHandler = jest.fn().mockResolvedValue(changeWorkItemParentMutationResponse),
- } = {}) => {
- mockApollo = createMockApollo([
- [getWorkItemLinksQuery, jest.fn().mockResolvedValue(workItemHierarchyResponse)],
- [changeWorkItemParentMutation, mutationHandler],
- ]);
-
- mockApollo.clients.defaultClient.cache.writeQuery({
- query: getWorkItemLinksQuery,
- variables: {
- id: PARENT_ID,
- },
- data: workItemHierarchyResponse.data,
- });
- wrapper = shallowMountExtended(WorkItemLinksMenu, {
- data() {
- return {
- ...data,
- };
- },
- propsData: {
- workItemId: WORK_ITEM_ID,
- parentWorkItemId: PARENT_ID,
- },
- apolloProvider: mockApollo,
- mocks: {
- $toast,
- },
- });
-
- await waitForPromises();
+ const createComponent = () => {
+ wrapper = shallowMountExtended(WorkItemLinksMenu);
};
const findDropdown = () => wrapper.find(GlDropdown);
const findRemoveDropdownItem = () => wrapper.find(GlDropdownItem);
beforeEach(async () => {
- await createComponent();
+ createComponent();
});
afterEach(() => {
wrapper.destroy();
- mockApollo = null;
});
it('renders dropdown and dropdown items', () => {
@@ -77,65 +26,9 @@ describe('WorkItemLinksMenu', () => {
expect(findRemoveDropdownItem().exists()).toBe(true);
});
- it('calls correct mutation with correct variables', async () => {
- const mutationHandler = jest.fn().mockResolvedValue(changeWorkItemParentMutationResponse);
-
- createComponent({ mutationHandler });
-
- findRemoveDropdownItem().vm.$emit('click');
-
- await waitForPromises();
-
- expect(mutationHandler).toHaveBeenCalledWith({
- id: WORK_ITEM_ID,
- parentId: null,
- });
- });
-
- it('shows toast when mutation succeeds', async () => {
- const mutationHandler = jest.fn().mockResolvedValue(changeWorkItemParentMutationResponse);
-
- createComponent({ mutationHandler });
-
- findRemoveDropdownItem().vm.$emit('click');
-
- await waitForPromises();
-
- expect($toast.show).toHaveBeenCalledWith('Child removed', {
- action: { onClick: expect.anything(), text: 'Undo' },
- });
- });
-
- it('updates the cache when mutation succeeds', async () => {
- const mutationHandler = jest.fn().mockResolvedValue(changeWorkItemParentMutationResponse);
-
- createComponent({ mutationHandler });
-
- mockApollo.clients.defaultClient.cache.readQuery = jest.fn(
- () => workItemHierarchyResponse.data,
- );
-
- mockApollo.clients.defaultClient.cache.writeQuery = jest.fn();
-
+ it('emits removeChild event on click Remove', () => {
findRemoveDropdownItem().vm.$emit('click');
- await waitForPromises();
-
- // Remove the work item from parent's children
- const resp = cloneDeep(workItemHierarchyResponse);
- const index = resp.data.workItem.widgets
- .find((widget) => widget.type === WIDGET_TYPE_HIERARCHY)
- .children.nodes.findIndex((child) => child.id === WORK_ITEM_ID);
- resp.data.workItem.widgets
- .find((widget) => widget.type === WIDGET_TYPE_HIERARCHY)
- .children.nodes.splice(index, 1);
-
- expect(mockApollo.clients.defaultClient.cache.writeQuery).toHaveBeenCalledWith(
- expect.objectContaining({
- query: expect.anything(),
- variables: { id: PARENT_ID },
- data: resp.data,
- }),
- );
+ expect(wrapper.emitted('removeChild')).toHaveLength(1);
});
});
diff --git a/spec/frontend/work_items/components/work_item_links/work_item_links_spec.js b/spec/frontend/work_items/components/work_item_links/work_item_links_spec.js
index 2ec9b1ec0ac..00f508f1548 100644
--- a/spec/frontend/work_items/components/work_item_links/work_item_links_spec.js
+++ b/spec/frontend/work_items/components/work_item_links/work_item_links_spec.js
@@ -1,34 +1,85 @@
import Vue, { nextTick } from 'vue';
-import { GlBadge } from '@gitlab/ui';
+import { GlButton, GlIcon, GlAlert } from '@gitlab/ui';
import VueApollo from 'vue-apollo';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
+import SidebarEventHub from '~/sidebar/event_hub';
+import { DEFAULT_DEBOUNCE_AND_THROTTLE_MS } from '~/lib/utils/constants';
import WorkItemLinks from '~/work_items/components/work_item_links/work_item_links.vue';
+import workItemQuery from '~/work_items/graphql/work_item.query.graphql';
+import changeWorkItemParentMutation from '~/work_items/graphql/update_work_item.mutation.graphql';
import getWorkItemLinksQuery from '~/work_items/graphql/work_item_links.query.graphql';
-import { workItemHierarchyResponse, workItemHierarchyEmptyResponse } from '../../mock_data';
+import {
+ workItemHierarchyResponse,
+ workItemHierarchyEmptyResponse,
+ workItemHierarchyNoUpdatePermissionResponse,
+ changeWorkItemParentMutationResponse,
+ workItemQueryResponse,
+} from '../../mock_data';
Vue.use(VueApollo);
describe('WorkItemLinks', () => {
let wrapper;
+ let mockApollo;
+
+ const WORK_ITEM_ID = 'gid://gitlab/WorkItem/2';
+
+ const $toast = {
+ show: jest.fn(),
+ };
+
+ const mutationChangeParentHandler = jest
+ .fn()
+ .mockResolvedValue(changeWorkItemParentMutationResponse);
+
+ const childWorkItemQueryHandler = jest.fn().mockResolvedValue(workItemQueryResponse);
+
+ const findChildren = () => wrapper.findAll('[data-testid="links-child"]');
+
+ const createComponent = async ({
+ data = {},
+ fetchHandler = jest.fn().mockResolvedValue(workItemHierarchyResponse),
+ mutationHandler = mutationChangeParentHandler,
+ } = {}) => {
+ mockApollo = createMockApollo(
+ [
+ [getWorkItemLinksQuery, fetchHandler],
+ [changeWorkItemParentMutation, mutationHandler],
+ [workItemQuery, childWorkItemQueryHandler],
+ ],
+ {},
+ { addTypename: true },
+ );
- const createComponent = async ({ response = workItemHierarchyResponse } = {}) => {
wrapper = shallowMountExtended(WorkItemLinks, {
- apolloProvider: createMockApollo([
- [getWorkItemLinksQuery, jest.fn().mockResolvedValue(response)],
- ]),
+ data() {
+ return {
+ ...data,
+ };
+ },
+ provide: {
+ projectPath: 'project/path',
+ },
propsData: { issuableId: 1 },
+ apolloProvider: mockApollo,
+ mocks: {
+ $toast,
+ },
});
await waitForPromises();
};
+ const findAlert = () => wrapper.findComponent(GlAlert);
const findToggleButton = () => wrapper.findByTestId('toggle-links');
const findLinksBody = () => wrapper.findByTestId('links-body');
const findEmptyState = () => wrapper.findByTestId('links-empty');
const findToggleAddFormButton = () => wrapper.findByTestId('toggle-add-form');
const findAddLinksForm = () => wrapper.findByTestId('add-links-form');
+ const findFirstLinksMenu = () => wrapper.findByTestId('links-menu');
+ const findChildrenCount = () => wrapper.findByTestId('children-count');
beforeEach(async () => {
await createComponent();
@@ -36,6 +87,7 @@ describe('WorkItemLinks', () => {
afterEach(() => {
wrapper.destroy();
+ mockApollo = null;
});
it('is expanded by default', () => {
@@ -43,7 +95,7 @@ describe('WorkItemLinks', () => {
expect(findLinksBody().exists()).toBe(true);
});
- it('expands on click toggle button', async () => {
+ it('collapses on click toggle button', async () => {
findToggleButton().vm.$emit('click');
await nextTick();
@@ -67,7 +119,9 @@ describe('WorkItemLinks', () => {
describe('when no child links', () => {
beforeEach(async () => {
- await createComponent({ response: workItemHierarchyEmptyResponse });
+ await createComponent({
+ fetchHandler: jest.fn().mockResolvedValue(workItemHierarchyEmptyResponse),
+ });
});
it('displays empty state if there are no children', () => {
@@ -78,9 +132,140 @@ describe('WorkItemLinks', () => {
it('renders all hierarchy widget children', () => {
expect(findLinksBody().exists()).toBe(true);
+ expect(findChildren()).toHaveLength(4);
+ expect(findFirstLinksMenu().exists()).toBe(true);
+ });
+
+ it('shows alert when list loading fails', async () => {
+ const errorMessage = 'Some error';
+ await createComponent({
+ fetchHandler: jest.fn().mockRejectedValue(new Error(errorMessage)),
+ });
+
+ await nextTick();
+
+ expect(findAlert().exists()).toBe(true);
+ expect(findAlert().text()).toBe(errorMessage);
+ });
+
+ it('renders widget child icon and tooltip', () => {
+ expect(findChildren().at(0).findComponent(GlIcon).props('name')).toBe('issue-open-m');
+ expect(findChildren().at(1).findComponent(GlIcon).props('name')).toBe('issue-close');
+ });
+
+ it('renders confidentiality icon when child item is confidential', () => {
const children = wrapper.findAll('[data-testid="links-child"]');
+ const confidentialIcon = children.at(0).find('[data-testid="confidential-icon"]');
+
+ expect(confidentialIcon.exists()).toBe(true);
+ expect(confidentialIcon.props('name')).toBe('eye-slash');
+ });
+
+ it('displays number of children', () => {
+ expect(findChildrenCount().exists()).toBe(true);
+
+ expect(findChildrenCount().text()).toContain('4');
+ });
+
+ it('refetches child items when `confidentialityUpdated` event is emitted on SidebarEventHub', async () => {
+ const fetchHandler = jest.fn().mockResolvedValue(workItemHierarchyResponse);
+ await createComponent({
+ fetchHandler,
+ });
+ await waitForPromises();
+
+ SidebarEventHub.$emit('confidentialityUpdated');
+ await nextTick();
+
+ // First call is done on component mount.
+ // Second call is done on confidentialityUpdated event.
+ expect(fetchHandler).toHaveBeenCalledTimes(2);
+ });
+
+ describe('when no permission to update', () => {
+ beforeEach(async () => {
+ await createComponent({
+ fetchHandler: jest.fn().mockResolvedValue(workItemHierarchyNoUpdatePermissionResponse),
+ });
+ });
- expect(children).toHaveLength(4);
- expect(children.at(0).findComponent(GlBadge).text()).toBe('Open');
+ it('does not display button to toggle Add form', () => {
+ expect(findToggleAddFormButton().exists()).toBe(false);
+ });
+
+ it('does not display link menu on children', () => {
+ expect(findFirstLinksMenu().exists()).toBe(false);
+ });
+ });
+
+ describe('remove child', () => {
+ beforeEach(async () => {
+ await createComponent({ mutationHandler: mutationChangeParentHandler });
+ });
+
+ it('calls correct mutation with correct variables', async () => {
+ findFirstLinksMenu().vm.$emit('removeChild');
+
+ await waitForPromises();
+
+ expect(mutationChangeParentHandler).toHaveBeenCalledWith({
+ input: {
+ id: WORK_ITEM_ID,
+ hierarchyWidget: {
+ parentId: null,
+ },
+ },
+ });
+ });
+
+ it('shows toast when mutation succeeds', async () => {
+ findFirstLinksMenu().vm.$emit('removeChild');
+
+ await waitForPromises();
+
+ expect($toast.show).toHaveBeenCalledWith('Child removed', {
+ action: { onClick: expect.anything(), text: 'Undo' },
+ });
+ });
+
+ it('renders correct number of children after removal', async () => {
+ expect(findChildren()).toHaveLength(4);
+
+ findFirstLinksMenu().vm.$emit('removeChild');
+ await waitForPromises();
+
+ expect(findChildren()).toHaveLength(3);
+ });
+ });
+
+ describe('prefetching child items', () => {
+ beforeEach(async () => {
+ await createComponent();
+ });
+
+ const findChildLink = () => findChildren().at(0).findComponent(GlButton);
+
+ it('does not fetch the child work item before hovering work item links', () => {
+ expect(childWorkItemQueryHandler).not.toHaveBeenCalled();
+ });
+
+ it('fetches the child work item if link is hovered for 250+ ms', async () => {
+ findChildLink().vm.$emit('mouseover');
+ jest.advanceTimersByTime(DEFAULT_DEBOUNCE_AND_THROTTLE_MS);
+ await waitForPromises();
+
+ expect(childWorkItemQueryHandler).toHaveBeenCalledWith({
+ id: 'gid://gitlab/WorkItem/2',
+ });
+ });
+
+ it('does not fetch the child work item if link is hovered for less than 250 ms', async () => {
+ findChildLink().vm.$emit('mouseover');
+ jest.advanceTimersByTime(200);
+ findChildLink().vm.$emit('mouseout');
+ await waitForPromises();
+
+ expect(childWorkItemQueryHandler).not.toHaveBeenCalled();
+ });
});
});
diff --git a/spec/frontend/work_items/components/work_item_state_spec.js b/spec/frontend/work_items/components/work_item_state_spec.js
index b379d1fc846..6b23a6e4795 100644
--- a/spec/frontend/work_items/components/work_item_state_spec.js
+++ b/spec/frontend/work_items/components/work_item_state_spec.js
@@ -29,6 +29,7 @@ describe('WorkItemState component', () => {
const createComponent = ({
state = STATE_OPEN,
mutationHandler = mutationSuccessHandler,
+ canUpdate = true,
} = {}) => {
const { id, workItemType } = workItemQueryResponse.data.workItem;
wrapper = shallowMount(WorkItemState, {
@@ -39,6 +40,7 @@ describe('WorkItemState component', () => {
state,
workItemType,
},
+ canUpdate,
},
});
};
@@ -53,6 +55,20 @@ describe('WorkItemState component', () => {
expect(findItemState().props('state')).toBe(workItemQueryResponse.data.workItem.state);
});
+ describe('item state disabled prop', () => {
+ describe.each`
+ description | canUpdate | value
+ ${'when cannot update'} | ${false} | ${true}
+ ${'when can update'} | ${true} | ${false}
+ `('$description', ({ canUpdate, value }) => {
+ it(`renders item state component with disabled=${value}`, () => {
+ createComponent({ canUpdate });
+
+ expect(findItemState().props('disabled')).toBe(value);
+ });
+ });
+ });
+
describe('when updating the state', () => {
it('calls a mutation', () => {
createComponent();
diff --git a/spec/frontend/work_items/components/work_item_title_spec.js b/spec/frontend/work_items/components/work_item_title_spec.js
index a48449bb636..c0d966abab8 100644
--- a/spec/frontend/work_items/components/work_item_title_spec.js
+++ b/spec/frontend/work_items/components/work_item_title_spec.js
@@ -20,7 +20,11 @@ describe('WorkItemTitle component', () => {
const findItemTitle = () => wrapper.findComponent(ItemTitle);
- const createComponent = ({ workItemParentId, mutationHandler = mutationSuccessHandler } = {}) => {
+ const createComponent = ({
+ workItemParentId,
+ mutationHandler = mutationSuccessHandler,
+ canUpdate = true,
+ } = {}) => {
const { id, title, workItemType } = workItemQueryResponse.data.workItem;
wrapper = shallowMount(WorkItemTitle, {
apolloProvider: createMockApollo([
@@ -32,6 +36,7 @@ describe('WorkItemTitle component', () => {
workItemTitle: title,
workItemType: workItemType.name,
workItemParentId,
+ canUpdate,
},
});
};
@@ -46,6 +51,20 @@ describe('WorkItemTitle component', () => {
expect(findItemTitle().props('title')).toBe(workItemQueryResponse.data.workItem.title);
});
+ describe('item title disabled prop', () => {
+ describe.each`
+ description | canUpdate | value
+ ${'when cannot update'} | ${false} | ${true}
+ ${'when can update'} | ${true} | ${false}
+ `('$description', ({ canUpdate, value }) => {
+ it(`renders item title component with disabled=${value}`, () => {
+ createComponent({ canUpdate });
+
+ expect(findItemTitle().props('disabled')).toBe(value);
+ });
+ });
+ });
+
describe('when updating the title', () => {
it('calls a mutation', () => {
const title = 'new title!';
diff --git a/spec/frontend/work_items/components/work_item_type_icon_spec.js b/spec/frontend/work_items/components/work_item_type_icon_spec.js
new file mode 100644
index 00000000000..85466578e18
--- /dev/null
+++ b/spec/frontend/work_items/components/work_item_type_icon_spec.js
@@ -0,0 +1,47 @@
+import { GlIcon } from '@gitlab/ui';
+import { shallowMount } from '@vue/test-utils';
+import WorkItemTypeIcon from '~/work_items/components/work_item_type_icon.vue';
+
+let wrapper;
+
+function createComponent(propsData) {
+ wrapper = shallowMount(WorkItemTypeIcon, { propsData });
+}
+
+describe('Work Item type component', () => {
+ const findIcon = () => wrapper.findComponent(GlIcon);
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe.each`
+ workItemType | workItemIconName | iconName | text
+ ${'TASK'} | ${''} | ${'issue-type-task'} | ${'Task'}
+ ${''} | ${'issue-type-task'} | ${'issue-type-task'} | ${''}
+ ${'ISSUE'} | ${''} | ${'issue-type-issue'} | ${'Issue'}
+ ${''} | ${'issue-type-issue'} | ${'issue-type-issue'} | ${''}
+ ${'REQUIREMENTS'} | ${''} | ${'issue-type-requirements'} | ${'Requirements'}
+ ${'INCIDENT'} | ${''} | ${'issue-type-incident'} | ${'Incident'}
+ ${'TEST_CASE'} | ${''} | ${'issue-type-test-case'} | ${'Test case'}
+ ${'random-issue-type'} | ${''} | ${'issue-type-issue'} | ${''}
+ `(
+ 'with workItemType set to "$workItemType" and workItemIconName set to "$workItemIconName"',
+ ({ workItemType, workItemIconName, iconName, text }) => {
+ beforeEach(() => {
+ createComponent({
+ workItemType,
+ workItemIconName,
+ });
+ });
+
+ it(`renders icon with name '${iconName}'`, () => {
+ expect(findIcon().props('name')).toBe(iconName);
+ });
+
+ it(`renders correct text`, () => {
+ expect(wrapper.text()).toBe(text);
+ });
+ },
+ );
+});
diff --git a/spec/frontend/work_items/components/work_item_weight_spec.js b/spec/frontend/work_items/components/work_item_weight_spec.js
index c3bbea26cda..94bdb336deb 100644
--- a/spec/frontend/work_items/components/work_item_weight_spec.js
+++ b/spec/frontend/work_items/components/work_item_weight_spec.js
@@ -1,16 +1,21 @@
import { GlForm, GlFormInput } from '@gitlab/ui';
-import { nextTick } from 'vue';
+import Vue, { nextTick } from 'vue';
+import VueApollo from 'vue-apollo';
+import createMockApollo from 'helpers/mock_apollo_helper';
import { mockTracking } from 'helpers/tracking_helper';
import { mountExtended } from 'helpers/vue_test_utils_helper';
+import waitForPromises from 'helpers/wait_for_promises';
import { __ } from '~/locale';
import WorkItemWeight from '~/work_items/components/work_item_weight.vue';
-import { TRACKING_CATEGORY_SHOW } from '~/work_items/constants';
-import localUpdateWorkItemMutation from '~/work_items/graphql/local_update_work_item.mutation.graphql';
+import { i18n, TRACKING_CATEGORY_SHOW } from '~/work_items/constants';
+import updateWorkItemMutation from '~/work_items/graphql/update_work_item.mutation.graphql';
+import { updateWorkItemMutationResponse } from 'jest/work_items/mock_data';
describe('WorkItemWeight component', () => {
+ Vue.use(VueApollo);
+
let wrapper;
- const mutateSpy = jest.fn();
const workItemId = 'gid://gitlab/WorkItem/1';
const workItemType = 'Task';
@@ -22,8 +27,10 @@ describe('WorkItemWeight component', () => {
hasIssueWeightsFeature = true,
isEditing = false,
weight,
+ mutationHandler = jest.fn().mockResolvedValue(updateWorkItemMutationResponse),
} = {}) => {
wrapper = mountExtended(WorkItemWeight, {
+ apolloProvider: createMockApollo([[updateWorkItemMutation, mutationHandler]]),
propsData: {
canUpdate,
weight,
@@ -33,11 +40,6 @@ describe('WorkItemWeight component', () => {
provide: {
hasIssueWeightsFeature,
},
- mocks: {
- $apollo: {
- mutate: mutateSpy,
- },
- },
});
if (isEditing) {
@@ -131,26 +133,73 @@ describe('WorkItemWeight component', () => {
});
describe('when blurred', () => {
- it('calls a mutation to update the weight', () => {
- const weight = 0;
- createComponent({ isEditing: true, weight });
+ it('calls a mutation to update the weight when the input value is different', () => {
+ const mutationSpy = jest.fn().mockResolvedValue(updateWorkItemMutationResponse);
+ createComponent({
+ isEditing: true,
+ weight: 0,
+ mutationHandler: mutationSpy,
+ canUpdate: true,
+ });
+
+ findInput().vm.$emit('blur', { target: { value: 1 } });
+
+ expect(mutationSpy).toHaveBeenCalledWith({
+ input: {
+ id: workItemId,
+ weightWidget: {
+ weight: 1,
+ },
+ },
+ });
+ });
+
+ it('does not call a mutation to update the weight when the input value is the same', () => {
+ const mutationSpy = jest.fn().mockResolvedValue(updateWorkItemMutationResponse);
+ createComponent({ isEditing: true, mutationHandler: mutationSpy, canUpdate: true });
findInput().trigger('blur');
- expect(mutateSpy).toHaveBeenCalledWith({
- mutation: localUpdateWorkItemMutation,
- variables: {
- input: {
- id: workItemId,
- weight,
+ expect(mutationSpy).not.toHaveBeenCalled();
+ });
+
+ it('emits an error when there is a GraphQL error', async () => {
+ const response = {
+ data: {
+ workItemUpdate: {
+ errors: ['Error!'],
+ workItem: {},
},
},
+ };
+ createComponent({
+ isEditing: true,
+ mutationHandler: jest.fn().mockResolvedValue(response),
+ canUpdate: true,
+ });
+
+ findInput().trigger('blur');
+ await waitForPromises();
+
+ expect(wrapper.emitted('error')).toEqual([[i18n.updateError]]);
+ });
+
+ it('emits an error when there is a network error', async () => {
+ createComponent({
+ isEditing: true,
+ mutationHandler: jest.fn().mockRejectedValue(new Error()),
+ canUpdate: true,
});
+
+ findInput().trigger('blur');
+ await waitForPromises();
+
+ expect(wrapper.emitted('error')).toEqual([[i18n.updateError]]);
});
it('tracks updating the weight', () => {
const trackingSpy = mockTracking(undefined, wrapper.element, jest.spyOn);
- createComponent();
+ createComponent({ canUpdate: true });
findInput().trigger('blur');
diff --git a/spec/frontend/work_items/mock_data.js b/spec/frontend/work_items/mock_data.js
index 0359caf7116..d24ac2a9f93 100644
--- a/spec/frontend/work_items/mock_data.js
+++ b/spec/frontend/work_items/mock_data.js
@@ -25,10 +25,14 @@ export const workItemQueryResponse = {
title: 'Test',
state: 'OPEN',
description: 'description',
+ confidential: false,
+ createdAt: '2022-08-03T12:41:54Z',
+ closedAt: null,
workItemType: {
__typename: 'WorkItemType',
id: 'gid://gitlab/WorkItems::Type/5',
name: 'Task',
+ iconName: 'issue-type-task',
},
userPermissions: {
deleteWorkItem: false,
@@ -46,6 +50,7 @@ export const workItemQueryResponse = {
__typename: 'WorkItemWidgetAssignees',
type: 'ASSIGNEES',
allowsMultipleAssignees: true,
+ canInviteMembers: true,
assignees: {
nodes: mockAssignees,
},
@@ -57,13 +62,14 @@ export const workItemQueryResponse = {
id: 'gid://gitlab/Issue/1',
iid: '5',
title: 'Parent title',
+ confidential: false,
},
children: {
- edges: [
+ nodes: [
{
- node: {
- id: 'gid://gitlab/WorkItem/444',
- },
+ id: 'gid://gitlab/WorkItem/444',
+ createdAt: '2022-08-03T12:41:54Z',
+ closedAt: null,
},
],
},
@@ -77,16 +83,21 @@ export const updateWorkItemMutationResponse = {
data: {
workItemUpdate: {
__typename: 'WorkItemUpdatePayload',
+ errors: [],
workItem: {
__typename: 'WorkItem',
id: 'gid://gitlab/WorkItem/1',
title: 'Updated title',
state: 'OPEN',
description: 'description',
+ confidential: false,
+ createdAt: '2022-08-03T12:41:54Z',
+ closedAt: null,
workItemType: {
__typename: 'WorkItemType',
id: 'gid://gitlab/WorkItems::Type/5',
name: 'Task',
+ iconName: 'issue-type-task',
},
userPermissions: {
deleteWorkItem: false,
@@ -95,24 +106,46 @@ export const updateWorkItemMutationResponse = {
widgets: [
{
children: {
- edges: [
+ nodes: [
{
- node: 'gid://gitlab/WorkItem/444',
+ id: 'gid://gitlab/WorkItem/444',
},
],
},
},
+ {
+ __typename: 'WorkItemWidgetAssignees',
+ type: 'ASSIGNEES',
+ allowsMultipleAssignees: true,
+ canInviteMembers: true,
+ assignees: {
+ nodes: [mockAssignees[0]],
+ },
+ },
],
},
},
},
};
+export const mockParent = {
+ parent: {
+ id: 'gid://gitlab/Issue/1',
+ iid: '5',
+ title: 'Parent title',
+ confidential: false,
+ },
+};
+
export const workItemResponseFactory = ({
canUpdate = false,
+ canDelete = false,
allowsMultipleAssignees = true,
assigneesWidgetPresent = true,
- parent = null,
+ weightWidgetPresent = true,
+ confidential = false,
+ canInviteMembers = false,
+ parent = mockParent.parent,
} = {}) => ({
data: {
workItem: {
@@ -121,13 +154,17 @@ export const workItemResponseFactory = ({
title: 'Updated title',
state: 'OPEN',
description: 'description',
+ confidential,
+ createdAt: '2022-08-03T12:41:54Z',
+ closedAt: null,
workItemType: {
__typename: 'WorkItemType',
id: 'gid://gitlab/WorkItems::Type/5',
name: 'Task',
+ iconName: 'issue-type-task',
},
userPermissions: {
- deleteWorkItem: false,
+ deleteWorkItem: canDelete,
updateWorkItem: canUpdate,
},
widgets: [
@@ -143,20 +180,28 @@ export const workItemResponseFactory = ({
__typename: 'WorkItemWidgetAssignees',
type: 'ASSIGNEES',
allowsMultipleAssignees,
+ canInviteMembers,
assignees: {
nodes: mockAssignees,
},
}
: { type: 'MOCK TYPE' },
+ weightWidgetPresent
+ ? {
+ __typename: 'WorkItemWidgetWeight',
+ type: 'WEIGHT',
+ weight: 0,
+ }
+ : { type: 'MOCK TYPE' },
{
__typename: 'WorkItemWidgetHierarchy',
type: 'HIERARCHY',
children: {
- edges: [
+ nodes: [
{
- node: {
- id: 'gid://gitlab/WorkItem/444',
- },
+ id: 'gid://gitlab/WorkItem/444',
+ createdAt: '2022-08-03T12:41:54Z',
+ closedAt: null,
},
],
},
@@ -203,10 +248,14 @@ export const createWorkItemMutationResponse = {
title: 'Updated title',
state: 'OPEN',
description: 'description',
+ confidential: false,
+ createdAt: '2022-08-03T12:41:54Z',
+ closedAt: null,
workItemType: {
__typename: 'WorkItemType',
id: 'gid://gitlab/WorkItems::Type/5',
name: 'Task',
+ iconName: 'issue-type-task',
},
userPermissions: {
deleteWorkItem: false,
@@ -214,6 +263,7 @@ export const createWorkItemMutationResponse = {
},
widgets: [],
},
+ errors: [],
},
},
};
@@ -229,10 +279,14 @@ export const createWorkItemFromTaskMutationResponse = {
id: 'gid://gitlab/WorkItem/1',
title: 'Updated title',
state: 'OPEN',
+ confidential: false,
+ createdAt: '2022-08-03T12:41:54Z',
+ closedAt: null,
workItemType: {
__typename: 'WorkItemType',
id: 'gid://gitlab/WorkItems::Type/5',
name: 'Task',
+ iconName: 'issue-type-task',
},
userPermissions: {
deleteWorkItem: false,
@@ -252,11 +306,15 @@ export const createWorkItemFromTaskMutationResponse = {
id: 'gid://gitlab/WorkItem/1000000',
title: 'Updated title',
state: 'OPEN',
+ createdAt: '2022-08-03T12:41:54Z',
+ closedAt: null,
description: '',
+ confidential: false,
workItemType: {
__typename: 'WorkItemType',
id: 'gid://gitlab/WorkItems::Type/5',
name: 'Task',
+ iconName: 'issue-type-task',
},
userPermissions: {
deleteWorkItem: false,
@@ -284,6 +342,32 @@ export const deleteWorkItemFailureResponse = {
],
};
+export const deleteWorkItemMutationErrorResponse = {
+ data: {
+ workItemDelete: {
+ errors: ['Error'],
+ },
+ },
+};
+
+export const deleteWorkItemFromTaskMutationResponse = {
+ data: {
+ workItemDeleteTask: {
+ workItem: { id: 123, descriptionHtml: 'updated work item desc' },
+ errors: [],
+ },
+ },
+};
+
+export const deleteWorkItemFromTaskMutationErrorResponse = {
+ data: {
+ workItemDeleteTask: {
+ workItem: { id: 123, descriptionHtml: 'updated work item desc' },
+ errors: ['Error'],
+ },
+ },
+};
+
export const workItemTitleSubscriptionResponse = {
data: {
issuableTitleUpdated: {
@@ -302,6 +386,13 @@ export const workItemHierarchyEmptyResponse = {
__typename: 'WorkItemType',
},
title: 'New title',
+ createdAt: '2022-08-03T12:41:54Z',
+ closedAt: null,
+ userPermissions: {
+ deleteWorkItem: false,
+ updateWorkItem: false,
+ },
+ confidential: false,
widgets: [
{
type: 'DESCRIPTION',
@@ -322,6 +413,54 @@ export const workItemHierarchyEmptyResponse = {
},
};
+export const workItemHierarchyNoUpdatePermissionResponse = {
+ data: {
+ workItem: {
+ id: 'gid://gitlab/WorkItem/1',
+ workItemType: {
+ id: 'gid://gitlab/WorkItems::Type/6',
+ __typename: 'WorkItemType',
+ },
+ title: 'New title',
+ userPermissions: {
+ deleteWorkItem: false,
+ updateWorkItem: false,
+ },
+ confidential: false,
+ widgets: [
+ {
+ type: 'DESCRIPTION',
+ __typename: 'WorkItemWidgetDescription',
+ },
+ {
+ type: 'HIERARCHY',
+ parent: null,
+ children: {
+ nodes: [
+ {
+ id: 'gid://gitlab/WorkItem/2',
+ workItemType: {
+ id: 'gid://gitlab/WorkItems::Type/5',
+ __typename: 'WorkItemType',
+ },
+ title: 'xyz',
+ state: 'OPEN',
+ confidential: false,
+ createdAt: '2022-08-03T12:41:54Z',
+ closedAt: null,
+ __typename: 'WorkItem',
+ },
+ ],
+ __typename: 'WorkItemConnection',
+ },
+ __typename: 'WorkItemWidgetHierarchy',
+ },
+ ],
+ __typename: 'WorkItem',
+ },
+ },
+};
+
export const workItemHierarchyResponse = {
data: {
workItem: {
@@ -331,6 +470,11 @@ export const workItemHierarchyResponse = {
__typename: 'WorkItemType',
},
title: 'New title',
+ userPermissions: {
+ deleteWorkItem: true,
+ updateWorkItem: true,
+ },
+ confidential: false,
widgets: [
{
type: 'DESCRIPTION',
@@ -349,6 +493,9 @@ export const workItemHierarchyResponse = {
},
title: 'xyz',
state: 'OPEN',
+ confidential: true,
+ createdAt: '2022-08-03T12:41:54Z',
+ closedAt: null,
__typename: 'WorkItem',
},
{
@@ -359,6 +506,9 @@ export const workItemHierarchyResponse = {
},
title: 'abc',
state: 'CLOSED',
+ confidential: false,
+ createdAt: '2022-08-03T12:41:54Z',
+ closedAt: '2022-08-12T13:07:52Z',
__typename: 'WorkItem',
},
{
@@ -369,6 +519,9 @@ export const workItemHierarchyResponse = {
},
title: 'bar',
state: 'OPEN',
+ confidential: false,
+ createdAt: '2022-08-03T12:41:54Z',
+ closedAt: null,
__typename: 'WorkItem',
},
{
@@ -379,6 +532,9 @@ export const workItemHierarchyResponse = {
},
title: 'foobar',
state: 'OPEN',
+ confidential: false,
+ createdAt: '2022-08-03T12:41:54Z',
+ closedAt: null,
__typename: 'WorkItem',
},
],
@@ -396,14 +552,34 @@ export const changeWorkItemParentMutationResponse = {
data: {
workItemUpdate: {
workItem: {
- id: 'gid://gitlab/WorkItem/2',
+ __typename: 'WorkItem',
workItemType: {
- id: 'gid://gitlab/WorkItems::Type/5',
__typename: 'WorkItemType',
+ id: 'gid://gitlab/WorkItems::Type/1',
+ name: 'Issue',
+ iconName: 'issue-type-issue',
},
- title: 'Foo',
+ userPermissions: {
+ deleteWorkItem: true,
+ updateWorkItem: true,
+ },
+ description: null,
+ id: 'gid://gitlab/WorkItem/2',
state: 'OPEN',
- __typename: 'WorkItem',
+ title: 'Foo',
+ confidential: false,
+ createdAt: '2022-08-03T12:41:54Z',
+ closedAt: null,
+ widgets: [
+ {
+ __typename: 'WorkItemWidgetHierarchy',
+ type: 'HIERARCHY',
+ parent: null,
+ children: {
+ nodes: [],
+ },
+ },
+ ],
},
errors: [],
__typename: 'WorkItemUpdatePayload',
@@ -423,6 +599,7 @@ export const availableWorkItemsResponse = {
id: 'gid://gitlab/WorkItem/458',
title: 'Task 1',
state: 'OPEN',
+ createdAt: '2022-08-03T12:41:54Z',
},
},
{
@@ -430,6 +607,7 @@ export const availableWorkItemsResponse = {
id: 'gid://gitlab/WorkItem/459',
title: 'Task 2',
state: 'OPEN',
+ createdAt: '2022-08-03T12:41:54Z',
},
},
],
@@ -551,11 +729,3 @@ export const projectLabelsResponse = {
},
},
};
-
-export const mockParent = {
- parent: {
- id: 'gid://gitlab/Issue/1',
- iid: '5',
- title: 'Parent title',
- },
-};
diff --git a/spec/frontend/work_items/pages/work_item_detail_spec.js b/spec/frontend/work_items/pages/work_item_detail_spec.js
index 43869468ad0..823981df880 100644
--- a/spec/frontend/work_items/pages/work_item_detail_spec.js
+++ b/spec/frontend/work_items/pages/work_item_detail_spec.js
@@ -1,11 +1,12 @@
-import { GlAlert, GlSkeletonLoader, GlButton } from '@gitlab/ui';
+import { GlAlert, GlBadge, GlLoadingIcon, GlSkeletonLoader, GlButton } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
-import Vue from 'vue';
+import Vue, { nextTick } from 'vue';
import VueApollo from 'vue-apollo';
import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
import LocalStorageSync from '~/vue_shared/components/local_storage_sync.vue';
import WorkItemDetail from '~/work_items/components/work_item_detail.vue';
+import WorkItemActions from '~/work_items/components/work_item_actions.vue';
import WorkItemDescription from '~/work_items/components/work_item_description.vue';
import WorkItemState from '~/work_items/components/work_item_state.vue';
import WorkItemTitle from '~/work_items/components/work_item_title.vue';
@@ -16,6 +17,8 @@ import WorkItemInformation from '~/work_items/components/work_item_information.v
import { i18n } from '~/work_items/constants';
import workItemQuery from '~/work_items/graphql/work_item.query.graphql';
import workItemTitleSubscription from '~/work_items/graphql/work_item_title.subscription.graphql';
+import updateWorkItemMutation from '~/work_items/graphql/update_work_item.mutation.graphql';
+import updateWorkItemTaskMutation from '~/work_items/graphql/update_work_item_task.mutation.graphql';
import { temporaryConfig } from '~/work_items/graphql/provider';
import { useLocalStorageSpy } from 'helpers/local_storage_helper';
import {
@@ -30,12 +33,19 @@ describe('WorkItemDetail component', () => {
Vue.use(VueApollo);
- const workItemQueryResponse = workItemResponseFactory();
+ const workItemQueryResponse = workItemResponseFactory({ canUpdate: true, canDelete: true });
+ const workItemQueryResponseWithoutParent = workItemResponseFactory({
+ parent: null,
+ canUpdate: true,
+ canDelete: true,
+ });
const successHandler = jest.fn().mockResolvedValue(workItemQueryResponse);
const initialSubscriptionHandler = jest.fn().mockResolvedValue(workItemTitleSubscriptionResponse);
const findAlert = () => wrapper.findComponent(GlAlert);
const findSkeleton = () => wrapper.findComponent(GlSkeletonLoader);
+ const findLoadingIcon = () => wrapper.findComponent(GlLoadingIcon);
+ const findWorkItemActions = () => wrapper.findComponent(WorkItemActions);
const findWorkItemTitle = () => wrapper.findComponent(WorkItemTitle);
const findWorkItemState = () => wrapper.findComponent(WorkItemState);
const findWorkItemDescription = () => wrapper.findComponent(WorkItemDescription);
@@ -51,17 +61,21 @@ describe('WorkItemDetail component', () => {
const createComponent = ({
isModal = false,
+ updateInProgress = false,
workItemId = workItemQueryResponse.data.workItem.id,
handler = successHandler,
subscriptionHandler = initialSubscriptionHandler,
+ confidentialityMock = [updateWorkItemMutation, jest.fn()],
workItemsMvc2Enabled = false,
includeWidgets = false,
+ error = undefined,
} = {}) => {
wrapper = shallowMount(WorkItemDetail, {
apolloProvider: createMockApollo(
[
[workItemQuery, handler],
[workItemTitleSubscription, subscriptionHandler],
+ confidentialityMock,
],
{},
{
@@ -69,6 +83,12 @@ describe('WorkItemDetail component', () => {
},
),
propsData: { isModal, workItemId },
+ data() {
+ return {
+ updateInProgress,
+ error,
+ };
+ },
provide: {
glFeatures: {
workItemsMvc2: workItemsMvc2Enabled,
@@ -146,6 +166,148 @@ describe('WorkItemDetail component', () => {
});
});
+ describe('confidentiality', () => {
+ const errorMessage = 'Mutation failed';
+ const confidentialWorkItem = workItemResponseFactory({
+ confidential: true,
+ });
+
+ // Mocks for work item without parent
+ const withoutParentExpectedInputVars = {
+ id: workItemQueryResponse.data.workItem.id,
+ confidential: true,
+ };
+ const toggleConfidentialityWithoutParentHandler = jest.fn().mockResolvedValue({
+ data: {
+ workItemUpdate: {
+ workItem: confidentialWorkItem.data.workItem,
+ errors: [],
+ },
+ },
+ });
+ const withoutParentHandlerMock = jest
+ .fn()
+ .mockResolvedValue(workItemQueryResponseWithoutParent);
+ const confidentialityWithoutParentMock = [
+ updateWorkItemMutation,
+ toggleConfidentialityWithoutParentHandler,
+ ];
+ const confidentialityWithoutParentFailureMock = [
+ updateWorkItemMutation,
+ jest.fn().mockRejectedValue(new Error(errorMessage)),
+ ];
+
+ // Mocks for work item with parent
+ const withParentExpectedInputVars = {
+ id: mockParent.parent.id,
+ taskData: { id: workItemQueryResponse.data.workItem.id, confidential: true },
+ };
+ const toggleConfidentialityWithParentHandler = jest.fn().mockResolvedValue({
+ data: {
+ workItemUpdate: {
+ workItem: {
+ id: confidentialWorkItem.data.workItem.id,
+ descriptionHtml: confidentialWorkItem.data.workItem.description,
+ },
+ task: {
+ workItem: confidentialWorkItem.data.workItem,
+ confidential: true,
+ },
+ errors: [],
+ },
+ },
+ });
+ const confidentialityWithParentMock = [
+ updateWorkItemTaskMutation,
+ toggleConfidentialityWithParentHandler,
+ ];
+ const confidentialityWithParentFailureMock = [
+ updateWorkItemTaskMutation,
+ jest.fn().mockRejectedValue(new Error(errorMessage)),
+ ];
+
+ describe.each`
+ context | handlerMock | confidentialityMock | confidentialityFailureMock | inputVariables
+ ${'no parent'} | ${withoutParentHandlerMock} | ${confidentialityWithoutParentMock} | ${confidentialityWithoutParentFailureMock} | ${withoutParentExpectedInputVars}
+ ${'parent'} | ${successHandler} | ${confidentialityWithParentMock} | ${confidentialityWithParentFailureMock} | ${withParentExpectedInputVars}
+ `(
+ 'when work item has $context',
+ ({ handlerMock, confidentialityMock, confidentialityFailureMock, inputVariables }) => {
+ it('renders confidential badge when work item is confidential', async () => {
+ createComponent({
+ handler: jest.fn().mockResolvedValue(confidentialWorkItem),
+ confidentialityMock,
+ });
+
+ await waitForPromises();
+
+ const confidentialBadge = wrapper.findComponent(GlBadge);
+ expect(confidentialBadge.exists()).toBe(true);
+ expect(confidentialBadge.props()).toMatchObject({
+ variant: 'warning',
+ icon: 'eye-slash',
+ });
+ expect(confidentialBadge.attributes('title')).toBe(
+ 'Only project members with at least the Reporter role, the author, and assignees can view or be notified about this task.',
+ );
+ expect(confidentialBadge.text()).toBe('Confidential');
+ });
+
+ it('renders gl-loading-icon while update mutation is in progress', async () => {
+ createComponent({
+ handler: handlerMock,
+ confidentialityMock,
+ });
+
+ await waitForPromises();
+
+ findWorkItemActions().vm.$emit('toggleWorkItemConfidentiality', true);
+
+ await nextTick();
+
+ expect(findLoadingIcon().exists()).toBe(true);
+ });
+
+ it('emits workItemUpdated and shows confidentiality badge when mutation is successful', async () => {
+ createComponent({
+ handler: handlerMock,
+ confidentialityMock,
+ });
+
+ await waitForPromises();
+
+ findWorkItemActions().vm.$emit('toggleWorkItemConfidentiality', true);
+ await waitForPromises();
+
+ expect(wrapper.emitted('workItemUpdated')).toEqual([[{ confidential: true }]]);
+ expect(confidentialityMock[1]).toHaveBeenCalledWith({
+ input: inputVariables,
+ });
+ expect(findLoadingIcon().exists()).toBe(false);
+ });
+
+ it('shows alert message when mutation fails', async () => {
+ createComponent({
+ handler: handlerMock,
+ confidentialityMock: confidentialityFailureMock,
+ });
+
+ await waitForPromises();
+ findWorkItemActions().vm.$emit('toggleWorkItemConfidentiality', true);
+ await waitForPromises();
+
+ expect(wrapper.emitted('workItemUpdated')).toBeFalsy();
+
+ await nextTick();
+
+ expect(findAlert().exists()).toBe(true);
+ expect(findAlert().text()).toBe(errorMessage);
+ expect(findLoadingIcon().exists()).toBe(false);
+ });
+ },
+ );
+ });
+
describe('description', () => {
it('does not show description widget if loading description fails', () => {
createComponent();
@@ -169,7 +331,7 @@ describe('WorkItemDetail component', () => {
});
it('does not show secondary breadcrumbs if there is not a parent', async () => {
- createComponent();
+ createComponent({ handler: jest.fn().mockResolvedValue(workItemQueryResponseWithoutParent) });
await waitForPromises();
@@ -177,7 +339,7 @@ describe('WorkItemDetail component', () => {
});
it('shows work item type if there is not a parent', async () => {
- createComponent();
+ createComponent({ handler: jest.fn().mockResolvedValue(workItemQueryResponseWithoutParent) });
await waitForPromises();
expect(findWorkItemType().exists()).toBe(true);
@@ -276,34 +438,29 @@ describe('WorkItemDetail component', () => {
});
describe('weight widget', () => {
- describe('when work_items_mvc_2 feature flag is enabled', () => {
- describe.each`
- description | includeWidgets | exists
- ${'when widget is returned from API'} | ${true} | ${true}
- ${'when widget is not returned from API'} | ${false} | ${false}
- `('$description', ({ includeWidgets, exists }) => {
- it(`${includeWidgets ? 'renders' : 'does not render'} weight component`, async () => {
- createComponent({ includeWidgets, workItemsMvc2Enabled: true });
- await waitForPromises();
+ describe.each`
+ description | weightWidgetPresent | exists
+ ${'when widget is returned from API'} | ${true} | ${true}
+ ${'when widget is not returned from API'} | ${false} | ${false}
+ `('$description', ({ weightWidgetPresent, exists }) => {
+ it(`${weightWidgetPresent ? 'renders' : 'does not render'} weight component`, async () => {
+ const response = workItemResponseFactory({ weightWidgetPresent });
+ const handler = jest.fn().mockResolvedValue(response);
+ createComponent({ handler });
+ await waitForPromises();
- expect(findWorkItemWeight().exists()).toBe(exists);
- });
+ expect(findWorkItemWeight().exists()).toBe(exists);
});
});
- describe('when work_items_mvc_2 feature flag is disabled', () => {
- describe.each`
- description | includeWidgets | exists
- ${'when widget is returned from API'} | ${true} | ${false}
- ${'when widget is not returned from API'} | ${false} | ${false}
- `('$description', ({ includeWidgets, exists }) => {
- it(`${includeWidgets ? 'renders' : 'does not render'} weight component`, async () => {
- createComponent({ includeWidgets, workItemsMvc2Enabled: false });
- await waitForPromises();
+ it('shows an error message when it emits an `error` event', async () => {
+ createComponent({ workItemsMvc2Enabled: true });
+ await waitForPromises();
- expect(findWorkItemWeight().exists()).toBe(exists);
- });
- });
+ findWorkItemWeight().vm.$emit('error', i18n.updateError);
+ await waitForPromises();
+
+ expect(findAlert().text()).toBe(i18n.updateError);
});
});
diff --git a/spec/frontend/work_items_hierarchy/components/app_spec.js b/spec/frontend/work_items_hierarchy/components/app_spec.js
index 092e9c90553..1426fbfab80 100644
--- a/spec/frontend/work_items_hierarchy/components/app_spec.js
+++ b/spec/frontend/work_items_hierarchy/components/app_spec.js
@@ -1,19 +1,17 @@
-import { nextTick } from 'vue';
-import { createLocalVue, mount } from '@vue/test-utils';
+import Vue, { nextTick } from 'vue';
+import { mount } from '@vue/test-utils';
import VueApollo from 'vue-apollo';
import { GlBanner } from '@gitlab/ui';
import App from '~/work_items_hierarchy/components/app.vue';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
-const localVue = createLocalVue();
-localVue.use(VueApollo);
+Vue.use(VueApollo);
describe('WorkItemsHierarchy App', () => {
let wrapper;
const createComponent = (props = {}, data = {}) => {
wrapper = extendedWrapper(
mount(App, {
- localVue,
provide: {
illustrationPath: '/foo.svg',
licensePlan: 'free',
diff --git a/spec/frontend/work_items_hierarchy/components/hierarchy_spec.js b/spec/frontend/work_items_hierarchy/components/hierarchy_spec.js
index 74774e38d6b..67420e7fc2a 100644
--- a/spec/frontend/work_items_hierarchy/components/hierarchy_spec.js
+++ b/spec/frontend/work_items_hierarchy/components/hierarchy_spec.js
@@ -1,4 +1,5 @@
-import { createLocalVue, mount } from '@vue/test-utils';
+import { mount } from '@vue/test-utils';
+import Vue from 'vue';
import VueApollo from 'vue-apollo';
import { GlBadge } from '@gitlab/ui';
import Hierarchy from '~/work_items_hierarchy/components/hierarchy.vue';
@@ -6,8 +7,7 @@ import { extendedWrapper } from 'helpers/vue_test_utils_helper';
import RESPONSE from '~/work_items_hierarchy/static_response';
import { workItemTypes } from '~/work_items_hierarchy/constants';
-const localVue = createLocalVue();
-localVue.use(VueApollo);
+Vue.use(VueApollo);
describe('WorkItemsHierarchy Hierarchy', () => {
let wrapper;
@@ -32,7 +32,6 @@ describe('WorkItemsHierarchy Hierarchy', () => {
const createComponent = (props = {}) => {
wrapper = extendedWrapper(
mount(Hierarchy, {
- localVue,
propsData: {
workItemTypes: props.workItemTypes,
...props,
diff --git a/spec/frontend_integration/content_editor/content_editor_integration_spec.js b/spec/frontend_integration/content_editor/content_editor_integration_spec.js
index 89b8d8d6d94..12cd6dcad83 100644
--- a/spec/frontend_integration/content_editor/content_editor_integration_spec.js
+++ b/spec/frontend_integration/content_editor/content_editor_integration_spec.js
@@ -61,29 +61,69 @@ describe('content_editor', () => {
});
});
- it('renders footnote ids alongside the footnote definition', async () => {
+ describe('when preserveUnchangedMarkdown feature flag is enabled', () => {
+ beforeEach(() => {
+ gon.features = { preserveUnchangedMarkdown: true };
+ });
+ afterEach(() => {
+ gon.features = { preserveUnchangedMarkdown: false };
+ });
+
+ it('processes and renders footnote ids alongside the footnote definition', async () => {
+ buildWrapper();
+
+ await contentEditorService.setSerializedContent(`
+This reference tag is a mix of letters and numbers [^footnote].
+
+[^footnote]: This is another footnote.
+ `);
+ await nextTick();
+
+ expect(wrapper.text()).toContain('footnote: This is another footnote');
+ });
+
+ it('processes and displays reference definitions', async () => {
+ buildWrapper();
+
+ await contentEditorService.setSerializedContent(`
+[GitLab][gitlab]
+
+[gitlab]: https://gitlab.com
+ `);
+ await nextTick();
+
+ expect(wrapper.find('pre').text()).toContain('[gitlab]: https://gitlab.com');
+ });
+ });
+
+ it('renders table of contents', async () => {
+ jest.useFakeTimers();
+
buildWrapper();
renderMarkdown.mockResolvedValue(`
- <p data-sourcepos="3:1-3:56" dir="auto">
- This reference tag is a mix of letters and numbers. <sup class="footnote-ref"><a href="#fn-footnote-2717" id="fnref-footnote-2717" data-footnote-ref="">2</a></sup>
- </p>
- <section class="footnotes" data-footnotes>
- <ol>
- <li id="fn-footnote-2717">
- <p data-sourcepos="6:7-6:31">This is another footnote. <a href="#fnref-footnote-2717" aria-label="Back to content" class="footnote-backref" data-footnote-backref=""><gl-emoji title="leftwards arrow with hook" data-name="leftwards_arrow_with_hook" data-unicode-version="1.1">↩</gl-emoji></a></p>
- </li>
- </ol>
- </section>
+<ul class="section-nav">
+</ul>
+<h1 dir="auto" data-sourcepos="3:1-3:11">
+ Heading 1
+</h1>
+<h2 dir="auto" data-sourcepos="5:1-5:12">
+ Heading 2
+</h2>
`);
await contentEditorService.setSerializedContent(`
- This reference tag is a mix of letters and numbers [^footnote].
+[TOC]
- [^footnote]: This is another footnote.
+# Heading 1
+
+## Heading 2
`);
+
await nextTick();
+ jest.runAllTimers();
- expect(wrapper.text()).toContain('footnote: This is another footnote');
+ expect(wrapper.findByTestId('table-of-contents').text()).toContain('Heading 1');
+ expect(wrapper.findByTestId('table-of-contents').text()).toContain('Heading 2');
});
});
diff --git a/spec/frontend_integration/fly_out_nav_browser_spec.js b/spec/frontend_integration/fly_out_nav_browser_spec.js
index 47f3c6a0ac2..07ddc0220e6 100644
--- a/spec/frontend_integration/fly_out_nav_browser_spec.js
+++ b/spec/frontend_integration/fly_out_nav_browser_spec.js
@@ -308,19 +308,19 @@ describe('Fly out sidebar navigation', () => {
describe('canShowSubItems', () => {
it('returns true if on desktop size', () => {
- expect(canShowSubItems()).toBeTruthy();
+ expect(canShowSubItems()).toBe(true);
});
it('returns false if on mobile size', () => {
breakpointSize = 'xs';
- expect(canShowSubItems()).toBeFalsy();
+ expect(canShowSubItems()).toBe(false);
});
});
describe('canShowActiveSubItems', () => {
it('returns true by default', () => {
- expect(canShowActiveSubItems(el)).toBeTruthy();
+ expect(canShowActiveSubItems(el)).toBe(true);
});
it('returns false when active & expanded sidebar', () => {
@@ -329,7 +329,7 @@ describe('Fly out sidebar navigation', () => {
setSidebar(sidebar);
- expect(canShowActiveSubItems(el)).toBeFalsy();
+ expect(canShowActiveSubItems(el)).toBe(false);
});
it('returns true when active & collapsed sidebar', () => {
@@ -339,7 +339,7 @@ describe('Fly out sidebar navigation', () => {
setSidebar(sidebar);
- expect(canShowActiveSubItems(el)).toBeTruthy();
+ expect(canShowActiveSubItems(el)).toBe(true);
});
});
diff --git a/spec/frontend_integration/ide/helpers/start.js b/spec/frontend_integration/ide/helpers/start.js
index 3c5ed9dfe20..925db12f36e 100644
--- a/spec/frontend_integration/ide/helpers/start.js
+++ b/spec/frontend_integration/ide/helpers/start.js
@@ -1,5 +1,4 @@
-/* global monaco */
-
+import { editor as monacoEditor } from 'monaco-editor';
import setWindowLocation from 'helpers/set_window_location_helper';
import { TEST_HOST } from 'helpers/test_constants';
import { initIde } from '~/ide';
@@ -20,7 +19,7 @@ export default (container, { isRepoEmpty = false, path = '', mrId = '' } = {}) =
const vm = initIde(el, { extendStore });
// We need to dispose of editor Singleton things or tests will bump into eachother
- vm.$on('destroy', () => monaco.editor.getModels().forEach((model) => model.dispose()));
+ vm.$on('destroy', () => monacoEditor.getModels().forEach((model) => model.dispose()));
return vm;
};
diff --git a/spec/frontend_integration/ide/ide_integration_spec.js b/spec/frontend_integration/ide/ide_integration_spec.js
index da48c600764..a6108fd71e1 100644
--- a/spec/frontend_integration/ide/ide_integration_spec.js
+++ b/spec/frontend_integration/ide/ide_integration_spec.js
@@ -1,6 +1,5 @@
import { nextTick } from 'vue';
import { setHTMLFixture, resetHTMLFixture } from 'helpers/fixtures';
-import { setTestTimeout } from 'helpers/timeout';
import waitForPromises from 'helpers/wait_for_promises';
import { waitForText } from 'helpers/wait_for_text';
import { useOverclockTimers } from 'test_helpers/utils/overclock_timers';
@@ -17,9 +16,6 @@ describe('WebIDE', () => {
beforeEach(() => {
stubPerformanceWebAPI();
- // For some reason these tests were timing out in CI.
- // We will investigate in https://gitlab.com/gitlab-org/gitlab/-/issues/298714
- setTestTimeout(20000);
setHTMLFixture('<div class="webide-container"></div>');
container = document.querySelector('.webide-container');
});
diff --git a/spec/frontend_integration/snippets/snippets_notes_spec.js b/spec/frontend_integration/snippets/snippets_notes_spec.js
index fdd3289bf58..5e9eaa1aada 100644
--- a/spec/frontend_integration/snippets/snippets_notes_spec.js
+++ b/spec/frontend_integration/snippets/snippets_notes_spec.js
@@ -50,6 +50,11 @@ describe('Integration Snippets notes', () => {
'circled latin capital letter m',
],
],
+ [':', ['100', '1234', '8ball', 'a', 'ab']],
+ // We do not want the search to start with space https://gitlab.com/gitlab-org/gitlab/-/issues/322548
+ [': ', []],
+ // We want to preserve that we can have space INSIDE the search
+ [':red ci', ['large red circle']],
])('shows a correct list of matching emojis when user enters %s', async (input, expected) => {
fillNoteTextarea(input);
diff --git a/spec/frontend_integration/test_helpers/setup/setup_globals.js b/spec/frontend_integration/test_helpers/setup/setup_globals.js
index ac5aeb1dd72..4f2eced40a5 100644
--- a/spec/frontend_integration/test_helpers/setup/setup_globals.js
+++ b/spec/frontend_integration/test_helpers/setup/setup_globals.js
@@ -1,7 +1,3 @@
-import { initializeTestTimeout } from 'helpers/timeout';
-
-initializeTestTimeout(process.env.CI ? 20000 : 7000);
-
beforeEach(() => {
window.gon = {
api_version: 'v4',
diff --git a/spec/graphql/features/feature_flag_spec.rb b/spec/graphql/features/feature_flag_spec.rb
index e5560fccf89..b06718eb16a 100644
--- a/spec/graphql/features/feature_flag_spec.rb
+++ b/spec/graphql/features/feature_flag_spec.rb
@@ -24,7 +24,7 @@ RSpec.describe 'Graphql Field feature flags' do
let(:query_type) do
query_factory do |query|
- query.field :item, type, null: true, feature_flag: feature_flag, resolver: new_resolver(test_object)
+ query.field :item, type, null: true, _deprecated_feature_flag: feature_flag, resolver: new_resolver(test_object)
end
end
diff --git a/spec/graphql/graphql_triggers_spec.rb b/spec/graphql/graphql_triggers_spec.rb
index 84af33a5cb3..5e2ab74a0e5 100644
--- a/spec/graphql/graphql_triggers_spec.rb
+++ b/spec/graphql/graphql_triggers_spec.rb
@@ -47,4 +47,18 @@ RSpec.describe GraphqlTriggers do
GraphqlTriggers.issuable_labels_updated(issue)
end
end
+
+ describe '.issuable_dates_updated' do
+ it 'triggers the issuableDatesUpdated subscription' do
+ work_item = create(:work_item)
+
+ expect(GitlabSchema.subscriptions).to receive(:trigger).with(
+ 'issuableDatesUpdated',
+ { issuable_id: work_item.to_gid },
+ work_item
+ ).and_call_original
+
+ GraphqlTriggers.issuable_dates_updated(work_item)
+ end
+ end
end
diff --git a/spec/graphql/mutations/ci/runner/bulk_delete_spec.rb b/spec/graphql/mutations/ci/runner/bulk_delete_spec.rb
new file mode 100644
index 00000000000..f47f1b9869e
--- /dev/null
+++ b/spec/graphql/mutations/ci/runner/bulk_delete_spec.rb
@@ -0,0 +1,91 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Mutations::Ci::Runner::BulkDelete do
+ include GraphqlHelpers
+
+ let_it_be(:admin_user) { create(:user, :admin) }
+ let_it_be(:user) { create(:user) }
+
+ let(:current_ctx) { { current_user: user } }
+
+ let(:mutation_params) do
+ {}
+ end
+
+ describe '#resolve' do
+ subject(:response) do
+ sync(resolve(described_class, args: mutation_params, ctx: current_ctx))
+ end
+
+ context 'when the user cannot admin the runner' do
+ let(:runner) { create(:ci_runner) }
+ let(:mutation_params) do
+ { ids: [runner.to_global_id] }
+ end
+
+ it 'generates an error' do
+ expect_graphql_error_to_be_created(Gitlab::Graphql::Errors::ResourceNotAvailable) { response }
+ end
+ end
+
+ context 'when user can delete runners' do
+ let(:user) { admin_user }
+ let!(:runners) do
+ create_list(:ci_runner, 2, :instance)
+ end
+
+ context 'when required arguments are missing' do
+ let(:mutation_params) { {} }
+
+ context 'when admin mode is enabled', :enable_admin_mode do
+ it 'does not return an error' do
+ is_expected.to match a_hash_including(errors: [])
+ end
+ end
+ end
+
+ context 'with runners specified by id' do
+ let(:mutation_params) do
+ { ids: runners.map(&:to_global_id) }
+ end
+
+ context 'when admin mode is enabled', :enable_admin_mode do
+ it 'deletes runners', :aggregate_failures do
+ expect_next_instance_of(
+ ::Ci::Runners::BulkDeleteRunnersService, { runners: runners }
+ ) do |service|
+ expect(service).to receive(:execute).once.and_call_original
+ end
+
+ expect { response }.to change { Ci::Runner.count }.by(-2)
+ expect(response[:errors]).to be_empty
+ end
+
+ context 'when runner list is above limit' do
+ before do
+ stub_const('::Ci::Runners::BulkDeleteRunnersService::RUNNER_LIMIT', 1)
+ end
+
+ it 'only deletes up to the defined limit', :aggregate_failures do
+ expect { response }.to change { Ci::Runner.count }
+ .by(-::Ci::Runners::BulkDeleteRunnersService::RUNNER_LIMIT)
+ expect(response[:errors]).to be_empty
+ end
+ end
+ end
+
+ context 'when admin mode is disabled', :aggregate_failures do
+ it 'returns error', :aggregate_failures do
+ expect do
+ expect_graphql_error_to_be_created(Gitlab::Graphql::Errors::ResourceNotAvailable) do
+ response
+ end
+ end.not_to change { Ci::Runner.count }
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/spec/graphql/mutations/ci/runner/update_spec.rb b/spec/graphql/mutations/ci/runner/update_spec.rb
index ffaa6e93d1b..b8efd4213fa 100644
--- a/spec/graphql/mutations/ci/runner/update_spec.rb
+++ b/spec/graphql/mutations/ci/runner/update_spec.rb
@@ -2,12 +2,11 @@
require 'spec_helper'
-RSpec.describe 'Mutations::Ci::Runner::Update' do
+RSpec.describe Mutations::Ci::Runner::Update do
include GraphqlHelpers
let_it_be(:user) { create(:user) }
let_it_be(:runner) { create(:ci_runner, active: true, locked: false, run_untagged: true) }
- let_it_be(:described_class) { Mutations::Ci::Runner::Update }
let(:current_ctx) { { current_user: user } }
let(:mutated_runner) { subject[:runner] }
diff --git a/spec/graphql/mutations/incident_management/timeline_event/update_spec.rb b/spec/graphql/mutations/incident_management/timeline_event/update_spec.rb
index 8296e5c6c15..102d33378c6 100644
--- a/spec/graphql/mutations/incident_management/timeline_event/update_spec.rb
+++ b/spec/graphql/mutations/incident_management/timeline_event/update_spec.rb
@@ -57,17 +57,40 @@ RSpec.describe Mutations::IncidentManagement::TimelineEvent::Update do
end
context 'when there is a validation error' do
- let(:occurred_at) { 'invalid date' }
+ context 'when note is blank' do
+ let(:note) { '' }
- it 'does not update the timeline event' do
- expect { resolve }.not_to change { timeline_event.reload.updated_at }
+ it 'does not update the timeline event' do
+ expect { resolve }.not_to change { timeline_event.reload.updated_at }
+ end
+
+ it 'responds with error' do
+ expect(resolve).to eq(timeline_event: nil, errors: ["Note can't be blank"])
+ end
end
- it 'responds with error' do
- expect(resolve).to eq(
- timeline_event: nil,
- errors: ["Occurred at can't be blank"]
- )
+ context 'when occurred_at is blank' do
+ let(:occurred_at) { '' }
+
+ it 'does not update the timeline event' do
+ expect { resolve }.not_to change { timeline_event.reload.updated_at }
+ end
+
+ it 'responds with error' do
+ expect(resolve).to eq(timeline_event: nil, errors: ["Occurred at can't be blank"])
+ end
+ end
+
+ context 'when occurred_at is invalid' do
+ let(:occurred_at) { 'invalid date' }
+
+ it 'does not update the timeline event' do
+ expect { resolve }.not_to change { timeline_event.reload.updated_at }
+ end
+
+ it 'responds with error' do
+ expect(resolve).to eq(timeline_event: nil, errors: ["Occurred at can't be blank"])
+ end
end
end
end
diff --git a/spec/graphql/mutations/merge_requests/set_labels_spec.rb b/spec/graphql/mutations/merge_requests/set_labels_spec.rb
index 1bb303cf99b..44bd9342b8e 100644
--- a/spec/graphql/mutations/merge_requests/set_labels_spec.rb
+++ b/spec/graphql/mutations/merge_requests/set_labels_spec.rb
@@ -64,7 +64,7 @@ RSpec.describe Mutations::MergeRequests::SetLabels do
end
context 'when passing operation_mode as REMOVE' do
- subject { mutation.resolve(project_path: merge_request.project.full_path, iid: merge_request.iid, label_ids: label_ids, operation_mode: Types::MutationOperationModeEnum.enum[:remove])}
+ subject { mutation.resolve(project_path: merge_request.project.full_path, iid: merge_request.iid, label_ids: label_ids, operation_mode: Types::MutationOperationModeEnum.enum[:remove]) }
it 'removes the labels, without removing others' do
merge_request.update!(labels: [label, label2])
diff --git a/spec/graphql/mutations/merge_requests/set_reviewers_spec.rb b/spec/graphql/mutations/merge_requests/set_reviewers_spec.rb
new file mode 100644
index 00000000000..df4aa885bbf
--- /dev/null
+++ b/spec/graphql/mutations/merge_requests/set_reviewers_spec.rb
@@ -0,0 +1,140 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Mutations::MergeRequests::SetReviewers do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:merge_request, reload: true) { create(:merge_request) }
+ let_it_be(:reviewer) { create(:user) }
+ let_it_be(:reviewer2) { create(:user) }
+
+ subject(:mutation) { described_class.new(object: nil, context: { current_user: user }, field: nil) }
+
+ describe '#resolve' do
+ let(:reviewer_usernames) { [reviewer.username] }
+ let(:mutated_merge_request) { subject[:merge_request] }
+ let(:mode) { described_class.arguments['operationMode'].default_value }
+
+ subject do
+ mutation.resolve(project_path: merge_request.project.full_path,
+ iid: merge_request.iid,
+ operation_mode: mode,
+ reviewer_usernames: reviewer_usernames)
+ end
+
+ it 'does not change reviewers if the merge_request is not accessible to the reviewers' do
+ merge_request.project.add_developer(user)
+
+ expect { subject }.not_to change { merge_request.reload.reviewer_ids }
+ end
+
+ it 'returns an operational error if the merge_request is not accessible to the reviewers' do
+ merge_request.project.add_developer(user)
+
+ result = subject
+
+ expect(result[:errors]).to include a_string_matching(/Cannot assign/)
+ end
+
+ context 'when the user does not have permissions' do
+ it_behaves_like 'permission level for merge request mutation is correctly verified'
+ end
+
+ context 'when the user can update the merge_request' do
+ before do
+ merge_request.project.add_developer(reviewer)
+ merge_request.project.add_developer(reviewer2)
+ merge_request.project.add_developer(user)
+ end
+
+ it 'replaces the reviewer' do
+ merge_request.reviewers = [reviewer2]
+ merge_request.save!
+
+ expect(mutated_merge_request).to eq(merge_request)
+ expect(mutated_merge_request.reviewers).to contain_exactly(reviewer)
+ expect(subject[:errors]).to be_empty
+ end
+
+ it 'returns errors when merge_request could not be updated' do
+ allow(merge_request).to receive(:errors_on_object).and_return(['foo'])
+
+ expect(subject[:errors]).not_to match_array(['foo'])
+ end
+
+ context 'when passing an empty reviewer list' do
+ let(:reviewer_usernames) { [] }
+
+ before do
+ merge_request.reviewers = [reviewer]
+ merge_request.save!
+ end
+
+ it 'removes all reviewers' do
+ expect(mutated_merge_request).to eq(merge_request)
+ expect(mutated_merge_request.reviewers).to eq([])
+ expect(subject[:errors]).to be_empty
+ end
+ end
+
+ context 'when passing "append" as true' do
+ subject do
+ mutation.resolve(
+ project_path: merge_request.project.full_path,
+ iid: merge_request.iid,
+ reviewer_usernames: reviewer_usernames,
+ operation_mode: Types::MutationOperationModeEnum.enum[:append]
+ )
+ end
+
+ before do
+ merge_request.reviewers = [reviewer2]
+ merge_request.save!
+
+ # In CE, APPEND is a NOOP as you can't have multiple reviewers
+ # We test multiple assignment in EE specs
+ stub_licensed_features(multiple_merge_request_reviewers: false)
+ end
+
+ it 'is a NO-OP in FOSS' do
+ expect(mutated_merge_request).to eq(merge_request)
+ expect(mutated_merge_request.reviewers).to contain_exactly(reviewer2)
+ expect(subject[:errors]).to be_empty
+ end
+ end
+
+ context 'when passing "remove" as true' do
+ before do
+ merge_request.reviewers = [reviewer]
+ merge_request.save!
+ end
+
+ it 'removes named reviewer' do
+ mutated_merge_request = mutation.resolve(
+ project_path: merge_request.project.full_path,
+ iid: merge_request.iid,
+ reviewer_usernames: reviewer_usernames,
+ operation_mode: Types::MutationOperationModeEnum.enum[:remove]
+ )[:merge_request]
+
+ expect(mutated_merge_request).to eq(merge_request)
+ expect(mutated_merge_request.reviewers).to eq([])
+ expect(subject[:errors]).to be_empty
+ end
+
+ it 'does not remove unnamed reviewer' do
+ mutated_merge_request = mutation.resolve(
+ project_path: merge_request.project.full_path,
+ iid: merge_request.iid,
+ reviewer_usernames: [reviewer2.username],
+ operation_mode: Types::MutationOperationModeEnum.enum[:remove]
+ )[:merge_request]
+
+ expect(mutated_merge_request).to eq(merge_request)
+ expect(mutated_merge_request.reviewers).to contain_exactly(reviewer)
+ expect(subject[:errors]).to be_empty
+ end
+ end
+ end
+ end
+end
diff --git a/spec/graphql/mutations/releases/create_spec.rb b/spec/graphql/mutations/releases/create_spec.rb
index 1f2c3ed537f..b6b9449aa39 100644
--- a/spec/graphql/mutations/releases/create_spec.rb
+++ b/spec/graphql/mutations/releases/create_spec.rb
@@ -11,9 +11,9 @@ RSpec.describe Mutations::Releases::Create do
let(:mutation) { described_class.new(object: nil, context: { current_user: current_user }, field: nil) }
- let(:tag) { 'v1.1.0'}
- let(:ref) { 'master'}
- let(:name) { 'Version 1.0'}
+ let(:tag) { 'v1.1.0' }
+ let(:ref) { 'master' }
+ let(:name) { 'Version 1.0' }
let(:description) { 'The first release :rocket:' }
let(:released_at) { Time.parse('2018-12-10') }
let(:milestones) { [milestone_12_3.title, milestone_12_4.title] }
diff --git a/spec/graphql/mutations/releases/delete_spec.rb b/spec/graphql/mutations/releases/delete_spec.rb
index 9934aea0031..09b420fe1ea 100644
--- a/spec/graphql/mutations/releases/delete_spec.rb
+++ b/spec/graphql/mutations/releases/delete_spec.rb
@@ -8,7 +8,7 @@ RSpec.describe Mutations::Releases::Delete do
let_it_be(:reporter) { create(:user) }
let_it_be(:developer) { create(:user) }
let_it_be(:maintainer) { create(:user) }
- let_it_be(:tag) { 'v1.1.0'}
+ let_it_be(:tag) { 'v1.1.0' }
let_it_be(:release) { create(:release, project: project, tag: tag) }
let(:mutation) { described_class.new(object: nil, context: { current_user: current_user }, field: nil) }
diff --git a/spec/graphql/mutations/releases/update_spec.rb b/spec/graphql/mutations/releases/update_spec.rb
index 9fae703b85a..15b10ea0648 100644
--- a/spec/graphql/mutations/releases/update_spec.rb
+++ b/spec/graphql/mutations/releases/update_spec.rb
@@ -9,8 +9,8 @@ RSpec.describe Mutations::Releases::Update do
let_it_be(:reporter) { create(:user) }
let_it_be(:developer) { create(:user) }
- let_it_be(:tag) { 'v1.1.0'}
- let_it_be(:name) { 'Version 1.0'}
+ let_it_be(:tag) { 'v1.1.0' }
+ let_it_be(:name) { 'Version 1.0' }
let_it_be(:description) { 'The first release :rocket:' }
let_it_be(:released_at) { Time.parse('2018-12-10').utc }
let_it_be(:created_at) { Time.parse('2018-11-05').utc }
diff --git a/spec/graphql/resolvers/ci/runner_jobs_resolver_spec.rb b/spec/graphql/resolvers/ci/runner_jobs_resolver_spec.rb
index 53b673e255b..ba8a127bec5 100644
--- a/spec/graphql/resolvers/ci/runner_jobs_resolver_spec.rb
+++ b/spec/graphql/resolvers/ci/runner_jobs_resolver_spec.rb
@@ -12,7 +12,7 @@ RSpec.describe Resolvers::Ci::RunnerJobsResolver do
let!(:build_one) { create(:ci_build, :success, name: 'Build One', runner: runner, pipeline: pipeline) }
let!(:build_two) { create(:ci_build, :success, name: 'Build Two', runner: runner, pipeline: pipeline) }
let!(:build_three) { create(:ci_build, :failed, name: 'Build Three', runner: runner, pipeline: pipeline) }
- let!(:irrelevant_build) { create(:ci_build, name: 'Irrelevant Build', pipeline: irrelevant_pipeline)}
+ let!(:irrelevant_build) { create(:ci_build, name: 'Irrelevant Build', pipeline: irrelevant_pipeline) }
let(:args) { {} }
let(:runner) { create(:ci_runner, :project, projects: [project]) }
diff --git a/spec/graphql/resolvers/crm/contact_state_counts_resolver_spec.rb b/spec/graphql/resolvers/crm/contact_state_counts_resolver_spec.rb
new file mode 100644
index 00000000000..0128ec792b3
--- /dev/null
+++ b/spec/graphql/resolvers/crm/contact_state_counts_resolver_spec.rb
@@ -0,0 +1,63 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Resolvers::Crm::ContactStateCountsResolver do
+ include GraphqlHelpers
+
+ let_it_be(:user) { create(:user) }
+ let_it_be(:group) { create(:group, :crm_enabled) }
+
+ before_all do
+ create(:contact, group: group, email: "x@test.com")
+ create(:contact, group: group, email: "y@test.com", state: 'inactive')
+ create_list(:contact, 3, group: group)
+ create_list(:contact, 2, group: group, state: 'inactive')
+ end
+
+ describe '#resolve' do
+ context 'with unauthorized user' do
+ it 'does not raise an error and returns no counts' do
+ expect { resolve_counts(group) }.not_to raise_error
+ expect(resolve_counts(group).all).to be(0)
+ end
+ end
+
+ context 'with authorized user' do
+ before do
+ group.add_reporter(user)
+ end
+
+ context 'without parent' do
+ it 'returns no counts' do
+ expect(resolve_counts(nil).all).to be(0)
+ end
+ end
+
+ context 'with a group' do
+ context 'when no filter is provided' do
+ it 'returns the count of all contacts' do
+ counts = resolve_counts(group)
+ expect(counts.all).to eq(7)
+ expect(counts.active).to eq(4)
+ expect(counts.inactive).to eq(3)
+ end
+ end
+
+ context 'when search term is provided' do
+ it 'returns the correct counts' do
+ counts = resolve_counts(group, { search: "@test.com" })
+
+ expect(counts.all).to be(2)
+ expect(counts.active).to be(1)
+ expect(counts.inactive).to be(1)
+ end
+ end
+ end
+ end
+ end
+
+ def resolve_counts(parent, args = {}, context = { current_user: user })
+ resolve(described_class, obj: parent, args: args, ctx: context)
+ end
+end
diff --git a/spec/graphql/resolvers/crm/contacts_resolver_spec.rb b/spec/graphql/resolvers/crm/contacts_resolver_spec.rb
index 98da4aeac28..c7c2d11e114 100644
--- a/spec/graphql/resolvers/crm/contacts_resolver_spec.rb
+++ b/spec/graphql/resolvers/crm/contacts_resolver_spec.rb
@@ -16,6 +16,7 @@ RSpec.describe Resolvers::Crm::ContactsResolver do
last_name: "DEF",
email: "ghi@test.com",
description: "LMNO",
+ organization: create(:organization, group: group),
state: "inactive"
)
end
@@ -61,11 +62,29 @@ RSpec.describe Resolvers::Crm::ContactsResolver do
end
context 'when no filter is provided' do
- it 'returns all the contacts in the correct order' do
+ it 'returns all the contacts in the default order' do
expect(resolve_contacts(group)).to eq([contact_a, contact_b])
end
end
+ context 'when a sort is provided' do
+ it 'returns all the contacts in the correct order' do
+ expect(resolve_contacts(group, { sort: 'EMAIL_DESC' })).to eq([contact_b, contact_a])
+ end
+ end
+
+    context 'when a sort requiring offset_pagination is provided' do
+ it 'returns all the contacts in the correct order' do
+ expect(resolve_contacts(group, { sort: 'ORGANIZATION_ASC' })).to eq([contact_a, contact_b])
+ end
+ end
+
+ context 'when filtering for all states' do
+ it 'returns all the contacts in the correct order' do
+ expect(resolve_contacts(group, { state: 'all' })).to eq([contact_a, contact_b])
+ end
+ end
+
context 'when search term is provided' do
it 'returns the correct contacts' do
expect(resolve_contacts(group, { search: "x@test.com" })).to contain_exactly(contact_b)
diff --git a/spec/graphql/resolvers/group_members/notification_email_resolver_spec.rb b/spec/graphql/resolvers/group_members/notification_email_resolver_spec.rb
index 8d0b8f9398d..e1c67bc7c18 100644
--- a/spec/graphql/resolvers/group_members/notification_email_resolver_spec.rb
+++ b/spec/graphql/resolvers/group_members/notification_email_resolver_spec.rb
@@ -17,7 +17,7 @@ RSpec.describe Resolvers::GroupMembers::NotificationEmailResolver do
expect(described_class).to have_nullable_graphql_type(GraphQL::Types::String)
end
- subject { batch_sync { resolve_notification_email(developer.group_members.first, current_user) }}
+ subject { batch_sync { resolve_notification_email(developer.group_members.first, current_user) } }
context 'when current_user is admin' do
let(:current_user) { create(:user, :admin) }
diff --git a/spec/graphql/resolvers/project_jobs_resolver_spec.rb b/spec/graphql/resolvers/project_jobs_resolver_spec.rb
index bb711a4c857..eb9d31ea7e5 100644
--- a/spec/graphql/resolvers/project_jobs_resolver_spec.rb
+++ b/spec/graphql/resolvers/project_jobs_resolver_spec.rb
@@ -14,7 +14,7 @@ RSpec.describe Resolvers::ProjectJobsResolver do
let_it_be(:failed_build) { create(:ci_build, :failed, name: 'Build Three', pipeline: pipeline) }
let_it_be(:pending_build) { create(:ci_build, :pending, name: 'Build Three', pipeline: pipeline) }
- let(:irrelevant_build) { create(:ci_build, name: 'Irrelevant Build', pipeline: irrelevant_pipeline)}
+ let(:irrelevant_build) { create(:ci_build, name: 'Irrelevant Build', pipeline: irrelevant_pipeline) }
let(:args) { {} }
let(:current_user) { create(:user) }
diff --git a/spec/graphql/resolvers/projects/fork_targets_resolver_spec.rb b/spec/graphql/resolvers/projects/fork_targets_resolver_spec.rb
new file mode 100644
index 00000000000..ef1b18f0a11
--- /dev/null
+++ b/spec/graphql/resolvers/projects/fork_targets_resolver_spec.rb
@@ -0,0 +1,49 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Resolvers::Projects::ForkTargetsResolver do
+ include GraphqlHelpers
+
+ let_it_be(:group) { create(:group, path: 'namespace-group') }
+ let_it_be(:another_group) { create(:group, path: 'namespace-another-group') }
+ let_it_be(:project) { create(:project, :private, group: group) }
+ let_it_be(:user) { create(:user, username: 'namespace-user', maintainer_projects: [project]) }
+
+ let(:args) { { search: 'namespace' } }
+
+ describe '#resolve' do
+ before_all do
+ group.add_owner(user)
+ another_group.add_owner(user)
+ end
+
+ it 'returns forkable namespaces' do
+ expect_next_instance_of(ForkTargetsFinder) do |finder|
+ expect(finder).to receive(:execute).with(args).and_call_original
+ end
+
+ expect(resolve_targets(args).items).to match_array([user.namespace, project.namespace, another_group])
+ end
+ end
+
+ context 'when a user cannot fork the project' do
+ let(:user) { create(:user) }
+
+ it 'does not return results' do
+ project.add_guest(user)
+
+ expect(resolve_targets(args)).to be_nil
+ end
+ end
+
+ def resolve_targets(args, opts = {})
+ field_options = described_class.field_options.merge(
+ owner: resolver_parent,
+ name: 'field_value'
+ ).merge(opts)
+
+ field = ::Types::BaseField.new(**field_options)
+ resolve_field(field, project, args: args, ctx: { current_user: user }, object_type: resolver_parent)
+ end
+end
diff --git a/spec/graphql/resolvers/projects/grafana_integration_resolver_spec.rb b/spec/graphql/resolvers/projects/grafana_integration_resolver_spec.rb
index 854e763fbdd..546b8592546 100644
--- a/spec/graphql/resolvers/projects/grafana_integration_resolver_spec.rb
+++ b/spec/graphql/resolvers/projects/grafana_integration_resolver_spec.rb
@@ -7,7 +7,7 @@ RSpec.describe Resolvers::Projects::GrafanaIntegrationResolver do
let_it_be(:project) { create(:project) }
let_it_be(:current_user) { create(:user) }
- let_it_be(:grafana_integration) { create(:grafana_integration, project: project)}
+ let_it_be(:grafana_integration) { create(:grafana_integration, project: project) }
describe '#resolve' do
context 'when object is not a project' do
@@ -19,7 +19,7 @@ RSpec.describe Resolvers::Projects::GrafanaIntegrationResolver do
end
context 'when object is nil' do
- it { expect(resolve_integration(obj: nil)).to eq nil}
+ it { expect(resolve_integration(obj: nil)).to eq nil }
end
end
diff --git a/spec/graphql/resolvers/projects_resolver_spec.rb b/spec/graphql/resolvers/projects_resolver_spec.rb
index 2685115d1a2..453fafb9590 100644
--- a/spec/graphql/resolvers/projects_resolver_spec.rb
+++ b/spec/graphql/resolvers/projects_resolver_spec.rb
@@ -142,7 +142,7 @@ RSpec.describe Resolvers::ProjectsResolver do
context 'when no sort is provided' do
it 'returns projects in descending order by id' do
- is_expected.to match_array((visible_projecs + named_projects).sort_by { |p| p[:id]}.reverse )
+ is_expected.to match_array((visible_projecs + named_projects).sort_by { |p| p[:id] }.reverse )
end
end
end
diff --git a/spec/graphql/types/base_field_spec.rb b/spec/graphql/types/base_field_spec.rb
index 439678e7e16..b85716e4d21 100644
--- a/spec/graphql/types/base_field_spec.rb
+++ b/spec/graphql/types/base_field_spec.rb
@@ -209,7 +209,7 @@ RSpec.describe Types::BaseField do
describe '#visible?' do
context 'and has a feature_flag' do
let(:flag) { :test_feature }
- let(:field) { described_class.new(name: 'test', type: GraphQL::Types::String, feature_flag: flag, null: false) }
+ let(:field) { described_class.new(name: 'test', type: GraphQL::Types::String, _deprecated_feature_flag: flag, null: false) }
let(:context) { {} }
before do
@@ -253,7 +253,7 @@ RSpec.describe Types::BaseField do
describe '#description' do
context 'feature flag given' do
- let(:field) { described_class.new(name: 'test', type: GraphQL::Types::String, feature_flag: flag, null: false, description: 'Test description.') }
+ let(:field) { described_class.new(name: 'test', type: GraphQL::Types::String, _deprecated_feature_flag: flag, null: false, description: 'Test description.') }
let(:flag) { :test_flag }
it 'prepends the description' do
@@ -299,7 +299,7 @@ RSpec.describe Types::BaseField do
end
it 'returns the correct availability in the description' do
- expect(field.description). to eq expected_description
+ expect(field.description).to eq expected_description
end
end
end
@@ -313,11 +313,11 @@ RSpec.describe Types::BaseField do
described_class.new(**base_args.merge(args))
end
- it 'interacts well with the `feature_flag` property' do
+ it 'interacts well with the `_deprecated_feature_flag` property' do
field = subject(
deprecated: { milestone: '1.10', reason: 'Deprecation reason' },
description: 'Field description.',
- feature_flag: 'foo_flag'
+ _deprecated_feature_flag: 'foo_flag'
)
expect(field.description).to start_with('Field description. Available only when feature flag `foo_flag` is enabled.')
diff --git a/spec/graphql/types/ci/group_variable_type_spec.rb b/spec/graphql/types/ci/group_variable_type_spec.rb
new file mode 100644
index 00000000000..106935642f2
--- /dev/null
+++ b/spec/graphql/types/ci/group_variable_type_spec.rb
@@ -0,0 +1,9 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe GitlabSchema.types['CiGroupVariable'] do
+ specify { expect(described_class.interfaces).to contain_exactly(Types::Ci::VariableInterface) }
+
+ specify { expect(described_class).to have_graphql_fields(:environment_scope, :masked, :protected).at_least }
+end
diff --git a/spec/graphql/types/ci/instance_variable_type_spec.rb b/spec/graphql/types/ci/instance_variable_type_spec.rb
new file mode 100644
index 00000000000..cf4aaed31f1
--- /dev/null
+++ b/spec/graphql/types/ci/instance_variable_type_spec.rb
@@ -0,0 +1,9 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe GitlabSchema.types['CiInstanceVariable'] do
+ specify { expect(described_class.interfaces).to contain_exactly(Types::Ci::VariableInterface) }
+
+ specify { expect(described_class).to have_graphql_fields(:masked, :protected).at_least }
+end
diff --git a/spec/graphql/types/ci/job_token_scope_type_spec.rb b/spec/graphql/types/ci/job_token_scope_type_spec.rb
index c1a3c4dd54d..457d46b6896 100644
--- a/spec/graphql/types/ci/job_token_scope_type_spec.rb
+++ b/spec/graphql/types/ci/job_token_scope_type_spec.rb
@@ -34,7 +34,7 @@ RSpec.describe GitlabSchema.types['CiJobTokenScopeType'] do
subject { GitlabSchema.execute(query, context: { current_user: current_user }).as_json }
let(:projects_field) { subject.dig('data', 'project', 'ciJobTokenScope', 'projects', 'nodes') }
- let(:returned_project_paths) { projects_field.map { |project| project['path']} }
+ let(:returned_project_paths) { projects_field.map { |project| project['path'] } }
context 'with access to scope' do
before do
diff --git a/spec/graphql/types/ci/manual_variable_type_spec.rb b/spec/graphql/types/ci/manual_variable_type_spec.rb
new file mode 100644
index 00000000000..2884c818a52
--- /dev/null
+++ b/spec/graphql/types/ci/manual_variable_type_spec.rb
@@ -0,0 +1,7 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe GitlabSchema.types['CiManualVariable'] do
+ specify { expect(described_class.interfaces).to contain_exactly(Types::Ci::VariableInterface) }
+end
diff --git a/spec/graphql/types/ci/project_variable_type_spec.rb b/spec/graphql/types/ci/project_variable_type_spec.rb
new file mode 100644
index 00000000000..e6e045b2bca
--- /dev/null
+++ b/spec/graphql/types/ci/project_variable_type_spec.rb
@@ -0,0 +1,9 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe GitlabSchema.types['CiProjectVariable'] do
+ specify { expect(described_class.interfaces).to contain_exactly(Types::Ci::VariableInterface) }
+
+ specify { expect(described_class).to have_graphql_fields(:environment_scope, :masked, :protected).at_least }
+end
diff --git a/spec/graphql/types/ci/runner_upgrade_status_type_enum_spec.rb b/spec/graphql/types/ci/runner_upgrade_status_enum_spec.rb
index 03c784dcbe7..ef378f3fc5a 100644
--- a/spec/graphql/types/ci/runner_upgrade_status_type_enum_spec.rb
+++ b/spec/graphql/types/ci/runner_upgrade_status_enum_spec.rb
@@ -2,13 +2,13 @@
require 'spec_helper'
-RSpec.describe Types::Ci::RunnerUpgradeStatusTypeEnum do
+RSpec.describe Types::Ci::RunnerUpgradeStatusEnum do
let(:model_only_enum_values) { %w[not_processed] }
let(:expected_graphql_source_values) do
Ci::RunnerVersion.statuses.keys - model_only_enum_values
end
- specify { expect(described_class.graphql_name).to eq('CiRunnerUpgradeStatusType') }
+ specify { expect(described_class.graphql_name).to eq('CiRunnerUpgradeStatus') }
it 'exposes all upgrade status values except not_processed' do
expect(described_class.values.keys).to match_array(
diff --git a/spec/graphql/types/ci/variable_input_type_spec.rb b/spec/graphql/types/ci/variable_input_type_spec.rb
new file mode 100644
index 00000000000..a56b6287dee
--- /dev/null
+++ b/spec/graphql/types/ci/variable_input_type_spec.rb
@@ -0,0 +1,11 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe GitlabSchema.types['CiVariableInput'] do
+ include GraphqlHelpers
+
+ it 'has the correct arguments' do
+ expect(described_class.arguments.keys).to match_array(%w[key value])
+ end
+end
diff --git a/spec/graphql/types/ci/variable_type_spec.rb b/spec/graphql/types/ci/variable_interface_spec.rb
index a81e6adbab6..8cef0ac2a14 100644
--- a/spec/graphql/types/ci/variable_type_spec.rb
+++ b/spec/graphql/types/ci/variable_interface_spec.rb
@@ -3,9 +3,9 @@
require 'spec_helper'
RSpec.describe GitlabSchema.types['CiVariable'] do
- it 'contains attributes related to CI variables' do
+ specify do
expect(described_class).to have_graphql_fields(
- :id, :key, :value, :variable_type, :protected, :masked, :raw, :environment_scope
- )
+ :id, :key, :value, :variable_type, :raw
+ ).at_least
end
end
diff --git a/spec/graphql/types/customer_relations/contact_sort_enum_spec.rb b/spec/graphql/types/customer_relations/contact_sort_enum_spec.rb
new file mode 100644
index 00000000000..5b0538042c8
--- /dev/null
+++ b/spec/graphql/types/customer_relations/contact_sort_enum_spec.rb
@@ -0,0 +1,28 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe GitlabSchema.types['ContactSort'] do
+ specify { expect(described_class.graphql_name).to eq('ContactSort') }
+
+ it_behaves_like 'common sort values'
+
+ it 'exposes all the contact sort values' do
+ expect(described_class.values.keys).to include(
+ *%w[
+ FIRST_NAME_ASC
+ FIRST_NAME_DESC
+ LAST_NAME_ASC
+ LAST_NAME_DESC
+ EMAIL_ASC
+ EMAIL_DESC
+ PHONE_ASC
+ PHONE_DESC
+ DESCRIPTION_ASC
+ DESCRIPTION_DESC
+ ORGANIZATION_ASC
+ ORGANIZATION_DESC
+ ]
+ )
+ end
+end
diff --git a/spec/graphql/types/customer_relations/contact_state_counts_type_spec.rb b/spec/graphql/types/customer_relations/contact_state_counts_type_spec.rb
new file mode 100644
index 00000000000..b022febb90f
--- /dev/null
+++ b/spec/graphql/types/customer_relations/contact_state_counts_type_spec.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe GitlabSchema.types['ContactStateCounts'] do
+ let(:fields) do
+ %w[
+ all
+ active
+ inactive
+ ]
+ end
+
+ it { expect(described_class.graphql_name).to eq('ContactStateCounts') }
+ it { expect(described_class).to have_graphql_fields(fields) }
+ it { expect(described_class).to require_graphql_authorizations(:read_crm_contact) }
+end
diff --git a/spec/graphql/types/global_id_type_spec.rb b/spec/graphql/types/global_id_type_spec.rb
index a57db9234f1..fa0b34113bc 100644
--- a/spec/graphql/types/global_id_type_spec.rb
+++ b/spec/graphql/types/global_id_type_spec.rb
@@ -114,7 +114,11 @@ RSpec.describe Types::GlobalIDType do
end
before do
- deprecation = Gitlab::GlobalId::Deprecations::Deprecation.new(old_model_name: 'OldIssue', new_model_name: 'Issue', milestone: '10.0')
+ deprecation = Gitlab::GlobalId::Deprecations::NameDeprecation.new(
+ old_name: 'OldIssue',
+ new_name: 'Issue',
+ milestone: '10.0'
+ )
stub_global_id_deprecations(deprecation)
end
diff --git a/spec/graphql/types/group_type_spec.rb b/spec/graphql/types/group_type_spec.rb
index 69c7eaf111f..72b3bb90194 100644
--- a/spec/graphql/types/group_type_spec.rb
+++ b/spec/graphql/types/group_type_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
RSpec.describe GitlabSchema.types['Group'] do
+ include GraphqlHelpers
+
specify { expect(described_class).to expose_permissions_using(Types::PermissionTypes::Group) }
specify { expect(described_class.graphql_name).to eq('Group') }
@@ -22,8 +24,8 @@ RSpec.describe GitlabSchema.types['Group'] do
dependency_proxy_blobs dependency_proxy_image_count
dependency_proxy_blob_count dependency_proxy_total_size
dependency_proxy_image_prefix dependency_proxy_image_ttl_policy
- shared_runners_setting timelogs organizations contacts work_item_types
- recent_issue_boards ci_variables
+ shared_runners_setting timelogs organizations contacts contact_state_counts
+ work_item_types recent_issue_boards ci_variables
]
expect(described_class).to include_graphql_fields(*expected_fields)
@@ -53,7 +55,52 @@ RSpec.describe GitlabSchema.types['Group'] do
end
end
+ describe 'contact_state_counts field' do
+ subject { described_class.fields['contactStateCounts'] }
+
+ it { is_expected.to have_graphql_type(Types::CustomerRelations::ContactStateCountsType) }
+ it { is_expected.to have_graphql_resolver(Resolvers::Crm::ContactStateCountsResolver) }
+ end
+
it_behaves_like 'a GraphQL type with labels' do
let(:labels_resolver_arguments) { [:search_term, :includeAncestorGroups, :includeDescendantGroups, :onlyGroupLabels] }
end
+
+ describe 'milestones' do
+ let(:user) { create(:user) }
+ let(:subgroup) { create(:group, parent: create(:group)) }
+ let(:query) do
+ %(
+ query {
+ group(fullPath: "#{subgroup.full_path}") {
+ milestones {
+ nodes {
+ id
+ title
+ projectMilestone
+ groupMilestone
+ subgroupMilestone
+ }
+ }
+ }
+ }
+ )
+ end
+
+ def clean_state_query
+ run_with_clean_state(query, context: { current_user: user })
+ end
+
+ it 'avoids N+1 queries' do
+ subgroup.add_reporter(user)
+
+ create(:milestone, group: subgroup)
+
+ control = ActiveRecord::QueryRecorder.new(skip_cached: false) { clean_state_query }
+
+ create_list(:milestone, 2, group: subgroup)
+
+ expect { clean_state_query }.not_to exceed_all_query_limit(control)
+ end
+ end
end
diff --git a/spec/graphql/types/issue_type_spec.rb b/spec/graphql/types/issue_type_spec.rb
index e7454b85357..2a0ae79b2c4 100644
--- a/spec/graphql/types/issue_type_spec.rb
+++ b/spec/graphql/types/issue_type_spec.rb
@@ -167,7 +167,7 @@ RSpec.describe GitlabSchema.types['Issue'] do
shared_examples_for 'does not include private notes' do
it "does not return private notes" do
notes = subject.dig("data", "project", "issue", "notes", 'edges')
- notes_body = notes.map {|n| n.dig('node', 'body')}
+ notes_body = notes.map { |n| n.dig('node', 'body') }
expect(notes.size).to eq 1
expect(notes_body).not_to include(private_note_body)
@@ -178,7 +178,7 @@ RSpec.describe GitlabSchema.types['Issue'] do
shared_examples_for 'includes private notes' do
it "returns all notes" do
notes = subject.dig("data", "project", "issue", "notes", 'edges')
- notes_body = notes.map {|n| n.dig('node', 'body')}
+ notes_body = notes.map { |n| n.dig('node', 'body') }
expect(notes.size).to eq 2
expect(notes_body).to include(private_note_body)
@@ -209,7 +209,7 @@ RSpec.describe GitlabSchema.types['Issue'] do
end
describe 'hidden', :enable_admin_mode do
- let_it_be(:admin) { create(:user, :admin)}
+ let_it_be(:admin) { create(:user, :admin) }
let_it_be(:banned_user) { create(:user, :banned) }
let_it_be(:user) { create(:user) }
let_it_be(:project) { create(:project, :public) }
diff --git a/spec/graphql/types/namespace_type_spec.rb b/spec/graphql/types/namespace_type_spec.rb
index 3b7f7e65e4b..168a6ba4eaa 100644
--- a/spec/graphql/types/namespace_type_spec.rb
+++ b/spec/graphql/types/namespace_type_spec.rb
@@ -9,6 +9,7 @@ RSpec.describe GitlabSchema.types['Namespace'] do
expected_fields = %w[
id name path full_name full_path description description_html visibility
lfs_enabled request_access_enabled projects root_storage_statistics shared_runners_setting
+ timelog_categories
]
expect(described_class).to include_graphql_fields(*expected_fields)
diff --git a/spec/graphql/types/notes/note_type_spec.rb b/spec/graphql/types/notes/note_type_spec.rb
index 03ff7828cf5..cbf7f086dbe 100644
--- a/spec/graphql/types/notes/note_type_spec.rb
+++ b/spec/graphql/types/notes/note_type_spec.rb
@@ -9,6 +9,7 @@ RSpec.describe GitlabSchema.types['Note'] do
body
body_html
confidential
+ internal
created_at
discussion
id
diff --git a/spec/graphql/types/project_type_spec.rb b/spec/graphql/types/project_type_spec.rb
index ed93d31da0f..5ff7653ce39 100644
--- a/spec/graphql/types/project_type_spec.rb
+++ b/spec/graphql/types/project_type_spec.rb
@@ -37,7 +37,7 @@ RSpec.describe GitlabSchema.types['Project'] do
cluster_agent cluster_agents agent_configurations
ci_template timelogs merge_commit_template squash_commit_template work_item_types
recent_issue_boards ci_config_path_or_default packages_cleanup_policy ci_variables
- recent_issue_boards ci_config_path_or_default ci_variables
+ timelog_categories fork_targets
]
expect(described_class).to include_graphql_fields(*expected_fields)
@@ -195,8 +195,8 @@ RSpec.describe GitlabSchema.types['Project'] do
expect(secure_analyzers['type']).to eq('string')
expect(secure_analyzers['field']).to eq('SECURE_ANALYZERS_PREFIX')
expect(secure_analyzers['label']).to eq('Image prefix')
- expect(secure_analyzers['defaultValue']).to eq(secure_analyzers_prefix)
- expect(secure_analyzers['value']).to eq(secure_analyzers_prefix)
+ expect(secure_analyzers['defaultValue']).to eq('$CI_TEMPLATE_REGISTRY_HOST/security-products')
+ expect(secure_analyzers['value']).to eq('$CI_TEMPLATE_REGISTRY_HOST/security-products')
expect(secure_analyzers['size']).to eq('LARGE')
expect(secure_analyzers['options']).to be_nil
end
diff --git a/spec/graphql/types/projects/service_type_enum_spec.rb b/spec/graphql/types/projects/service_type_enum_spec.rb
index ead69e60f6c..f7256910bb0 100644
--- a/spec/graphql/types/projects/service_type_enum_spec.rb
+++ b/spec/graphql/types/projects/service_type_enum_spec.rb
@@ -35,6 +35,7 @@ RSpec.describe GitlabSchema.types['ServiceType'] do
PIPELINES_EMAIL_SERVICE
PIVOTALTRACKER_SERVICE
PROMETHEUS_SERVICE
+ PUMBLE_SERVICE
PUSHOVER_SERVICE
REDMINE_SERVICE
SHIMO_SERVICE
diff --git a/spec/graphql/types/subscription_type_spec.rb b/spec/graphql/types/subscription_type_spec.rb
index 1a2629ed422..9b043fa52cf 100644
--- a/spec/graphql/types/subscription_type_spec.rb
+++ b/spec/graphql/types/subscription_type_spec.rb
@@ -9,6 +9,7 @@ RSpec.describe GitlabSchema.types['Subscription'] do
issue_crm_contacts_updated
issuable_title_updated
issuable_labels_updated
+ issuable_dates_updated
]
expect(described_class).to have_graphql_fields(*expected_fields).only
diff --git a/spec/graphql/types/time_tracking/timelog_category_type_spec.rb b/spec/graphql/types/time_tracking/timelog_category_type_spec.rb
new file mode 100644
index 00000000000..a14069e8b58
--- /dev/null
+++ b/spec/graphql/types/time_tracking/timelog_category_type_spec.rb
@@ -0,0 +1,22 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe GitlabSchema.types['TimeTrackingTimelogCategory'] do
+ let(:fields) do
+ %w[
+ id
+ name
+ description
+ color
+ billable
+ billing_rate
+ created_at
+ updated_at
+ ]
+ end
+
+ it { expect(described_class.graphql_name).to eq('TimeTrackingTimelogCategory') }
+ it { expect(described_class).to have_graphql_fields(fields) }
+ it { expect(described_class).to require_graphql_authorizations(:read_timelog_category) }
+end
diff --git a/spec/graphql/types/upload_type_spec.rb b/spec/graphql/types/upload_type_spec.rb
new file mode 100644
index 00000000000..2b959fbf105
--- /dev/null
+++ b/spec/graphql/types/upload_type_spec.rb
@@ -0,0 +1,13 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe GitlabSchema.types['FileUpload'] do
+ it { expect(described_class).to require_graphql_authorizations(:read_upload) }
+
+ it 'has the expected fields' do
+ expected_fields = %w[id size path]
+
+ expect(described_class).to include_graphql_fields(*expected_fields)
+ end
+end
diff --git a/spec/graphql/types/user_type_spec.rb b/spec/graphql/types/user_type_spec.rb
index fec6a771640..dcf25ff0667 100644
--- a/spec/graphql/types/user_type_spec.rb
+++ b/spec/graphql/types/user_type_spec.rb
@@ -49,7 +49,7 @@ RSpec.describe GitlabSchema.types['User'] do
end
describe 'name field' do
- let_it_be(:admin) { create(:user, :admin)}
+ let_it_be(:admin) { create(:user, :admin) }
let_it_be(:user) { create(:user) }
let_it_be(:requested_user) { create(:user, name: 'John Smith') }
let_it_be(:requested_project_bot) { create(:user, :project_bot, name: 'Project bot') }
diff --git a/spec/graphql/types/work_item_type_spec.rb b/spec/graphql/types/work_item_type_spec.rb
index 7ed58786b5b..c556424b0b4 100644
--- a/spec/graphql/types/work_item_type_spec.rb
+++ b/spec/graphql/types/work_item_type_spec.rb
@@ -11,16 +11,21 @@ RSpec.describe GitlabSchema.types['WorkItem'] do
it 'has specific fields' do
fields = %i[
+ confidential
description
description_html
id
iid
lock_version
+ project
state title
title_html
userPermissions
widgets
work_item_type
+ created_at
+ updated_at
+ closed_at
]
fields.each do |field_name|
diff --git a/spec/graphql/types/work_items/widget_interface_spec.rb b/spec/graphql/types/work_items/widget_interface_spec.rb
index caf986c961f..b9e8edacf15 100644
--- a/spec/graphql/types/work_items/widget_interface_spec.rb
+++ b/spec/graphql/types/work_items/widget_interface_spec.rb
@@ -18,6 +18,7 @@ RSpec.describe Types::WorkItems::WidgetInterface do
WorkItems::Widgets::Description | Types::WorkItems::Widgets::DescriptionType
WorkItems::Widgets::Hierarchy | Types::WorkItems::Widgets::HierarchyType
WorkItems::Widgets::Assignees | Types::WorkItems::Widgets::AssigneesType
+ WorkItems::Widgets::Labels | Types::WorkItems::Widgets::LabelsType
end
with_them do
diff --git a/spec/graphql/types/work_items/widgets/assignees_input_type_spec.rb b/spec/graphql/types/work_items/widgets/assignees_input_type_spec.rb
new file mode 100644
index 00000000000..2fcda2a43be
--- /dev/null
+++ b/spec/graphql/types/work_items/widgets/assignees_input_type_spec.rb
@@ -0,0 +1,9 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ::Types::WorkItems::Widgets::AssigneesInputType do
+ it { expect(described_class.graphql_name).to eq('WorkItemWidgetAssigneesInput') }
+
+ it { expect(described_class.arguments.keys).to match_array(%w[assigneeIds]) }
+end
diff --git a/spec/graphql/types/work_items/widgets/labels_type_spec.rb b/spec/graphql/types/work_items/widgets/labels_type_spec.rb
new file mode 100644
index 00000000000..028ebe979f3
--- /dev/null
+++ b/spec/graphql/types/work_items/widgets/labels_type_spec.rb
@@ -0,0 +1,11 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Types::WorkItems::Widgets::LabelsType do
+ it 'exposes the expected fields' do
+ expected_fields = %i[labels allowsScopedLabels type]
+
+ expect(described_class).to have_graphql_fields(*expected_fields)
+ end
+end
diff --git a/spec/graphql/types/work_items/widgets/start_and_due_date_type_spec.rb b/spec/graphql/types/work_items/widgets/start_and_due_date_type_spec.rb
new file mode 100644
index 00000000000..ddc26d964be
--- /dev/null
+++ b/spec/graphql/types/work_items/widgets/start_and_due_date_type_spec.rb
@@ -0,0 +1,11 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Types::WorkItems::Widgets::StartAndDueDateType do
+ it 'exposes the expected fields' do
+ expected_fields = %i[due_date start_date type]
+
+ expect(described_class).to have_graphql_fields(*expected_fields)
+ end
+end
diff --git a/spec/graphql/types/work_items/widgets/start_and_due_date_update_input_type_spec.rb b/spec/graphql/types/work_items/widgets/start_and_due_date_update_input_type_spec.rb
new file mode 100644
index 00000000000..91631093e4e
--- /dev/null
+++ b/spec/graphql/types/work_items/widgets/start_and_due_date_update_input_type_spec.rb
@@ -0,0 +1,9 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ::Types::WorkItems::Widgets::StartAndDueDateUpdateInputType do
+ it { expect(described_class.graphql_name).to eq('WorkItemWidgetStartAndDueDateUpdateInput') }
+
+ it { expect(described_class.arguments.keys).to contain_exactly('startDate', 'dueDate') }
+end
diff --git a/spec/helpers/admin/identities_helper_spec.rb b/spec/helpers/admin/identities_helper_spec.rb
new file mode 100644
index 00000000000..9a7fdd3aa69
--- /dev/null
+++ b/spec/helpers/admin/identities_helper_spec.rb
@@ -0,0 +1,58 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Admin::IdentitiesHelper do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:identity) { create(:identity, provider: 'ldapmain', extern_uid: 'ldap-uid') }
+
+ describe '#label_for_identity_provider' do
+ it 'shows label for identity provider' do
+ expect(helper.label_for_identity_provider(identity)).to eq 'ldap (ldapmain)'
+ end
+ end
+
+ describe '#provider_id_cell_testid' do
+ it 'shows blank provider id for data-testid' do
+ expect(helper.provider_id_cell_testid(identity)).to eq 'provider_id_blank'
+ end
+ end
+
+ describe '#provider_id' do
+ it 'shows no provider id' do
+ expect(helper.provider_id(identity)).to eq '-'
+ end
+ end
+
+ describe '#saml_group_cell_testid' do
+ it 'shows blank SAML group for data-testid' do
+ expect(helper.saml_group_cell_testid(identity)).to eq 'saml_group_blank'
+ end
+ end
+
+ describe '#saml_group_link' do
+ it 'shows no link to SAML group' do
+ expect(helper.saml_group_link(identity)).to eq '-'
+ end
+ end
+
+ describe '#identity_cells_to_render?' do
+ context 'without identities' do
+ it 'returns false' do
+ expect(helper.identity_cells_to_render?([], user)).to eq false
+ end
+ end
+
+ context 'with identities' do
+ it 'returns true' do
+ expect(helper.identity_cells_to_render?(identity, user)).to eq true
+ end
+ end
+ end
+
+ describe '#scim_identities_collection' do
+ it 'returns empty array' do
+ expect(helper.scim_identities_collection(user)).to eq []
+ end
+ end
+end
diff --git a/spec/helpers/application_helper_spec.rb b/spec/helpers/application_helper_spec.rb
index 47c31546629..264431b1bb5 100644
--- a/spec/helpers/application_helper_spec.rb
+++ b/spec/helpers/application_helper_spec.rb
@@ -192,6 +192,14 @@ RSpec.describe ApplicationHelper do
end
end
+ describe '#community_forum' do
+ subject { helper.community_forum }
+
+ it 'returns the url' do
+ is_expected.to eq("https://forum.gitlab.com")
+ end
+ end
+
describe '#support_url' do
context 'when alternate support url is specified' do
let(:alternate_url) { 'http://company.example.com/getting-help' }
diff --git a/spec/helpers/boards_helper_spec.rb b/spec/helpers/boards_helper_spec.rb
index 8d5dc3fb4be..ccc150c397a 100644
--- a/spec/helpers/boards_helper_spec.rb
+++ b/spec/helpers/boards_helper_spec.rb
@@ -130,6 +130,7 @@ RSpec.describe BoardsHelper do
it 'returns can_admin_list as false by default' do
expect(helper.board_data[:can_admin_list]).to eq('false')
end
+
it 'returns can_admin_list as true when user can admin the board lists' do
allow(helper).to receive(:can?).with(user, :admin_issue_board_list, project).and_return(true)
@@ -141,6 +142,7 @@ RSpec.describe BoardsHelper do
it 'returns can_admin_board as false by default' do
expect(helper.board_data[:can_admin_board]).to eq('false')
end
+
it 'returns can_admin_board as true when user can admin the board' do
allow(helper).to receive(:can?).with(user, :admin_issue_board, project).and_return(true)
@@ -178,6 +180,7 @@ RSpec.describe BoardsHelper do
it 'returns can_admin_list as false by default' do
expect(helper.board_data[:can_admin_list]).to eq('false')
end
+
it 'returns can_admin_list as true when user can admin the board lists' do
allow(helper).to receive(:can?).with(user, :admin_issue_board_list, base_group).and_return(true)
diff --git a/spec/helpers/ci/pipeline_editor_helper_spec.rb b/spec/helpers/ci/pipeline_editor_helper_spec.rb
index bc9e47a4ca1..1950d685980 100644
--- a/spec/helpers/ci/pipeline_editor_helper_spec.rb
+++ b/spec/helpers/ci/pipeline_editor_helper_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
RSpec.describe Ci::PipelineEditorHelper do
+ include CycleAnalyticsHelpers
+
let_it_be(:project) { create(:project) }
describe 'can_view_pipeline_editor?' do
@@ -62,8 +64,7 @@ RSpec.describe Ci::PipelineEditorHelper do
"project-path" => project.path,
"project-full-path" => project.full_path,
"project-namespace" => project.namespace.full_path,
- "runner-help-page-path" => help_page_path('ci/runners/index'),
- "simulate-pipeline-help-page-path" => help_page_path('ci/lint', anchor: 'simulate-a-pipeline'),
+ "simulate-pipeline-help-page-path" => help_page_path('ci/pipeline_editor/index', anchor: 'simulate-a-cicd-pipeline'),
"total-branches" => project.repository.branches.length,
"validate-tab-illustration-path" => 'illustrations/validate.svg',
"yml-help-page-path" => help_page_path('ci/yaml/index')
@@ -93,8 +94,7 @@ RSpec.describe Ci::PipelineEditorHelper do
"project-path" => project.path,
"project-full-path" => project.full_path,
"project-namespace" => project.namespace.full_path,
- "runner-help-page-path" => help_page_path('ci/runners/index'),
- "simulate-pipeline-help-page-path" => help_page_path('ci/lint', anchor: 'simulate-a-pipeline'),
+ "simulate-pipeline-help-page-path" => help_page_path('ci/pipeline_editor/index', anchor: 'simulate-a-cicd-pipeline'),
"total-branches" => 0,
"validate-tab-illustration-path" => 'illustrations/validate.svg',
"yml-help-page-path" => help_page_path('ci/yaml/index')
diff --git a/spec/helpers/ci/runners_helper_spec.rb b/spec/helpers/ci/runners_helper_spec.rb
index 4d1b1c7682c..3b18572ad64 100644
--- a/spec/helpers/ci/runners_helper_spec.rb
+++ b/spec/helpers/ci/runners_helper_spec.rb
@@ -109,7 +109,7 @@ RSpec.describe Ci::RunnersHelper do
it 'returns group data for top level group' do
result = {
- update_path: "/api/v4/groups/#{parent.id}",
+ group_id: parent.id,
shared_runners_setting: Namespace::SR_ENABLED,
parent_shared_runners_setting: nil
}.merge(runner_constants)
@@ -119,7 +119,7 @@ RSpec.describe Ci::RunnersHelper do
it 'returns group data for child group' do
result = {
- update_path: "/api/v4/groups/#{group.id}",
+ group_id: group.id,
shared_runners_setting: Namespace::SR_DISABLED_AND_UNOVERRIDABLE,
parent_shared_runners_setting: Namespace::SR_ENABLED
}.merge(runner_constants)
diff --git a/spec/helpers/commits_helper_spec.rb b/spec/helpers/commits_helper_spec.rb
index b5b572e9719..b27954de0d4 100644
--- a/spec/helpers/commits_helper_spec.rb
+++ b/spec/helpers/commits_helper_spec.rb
@@ -153,16 +153,24 @@ RSpec.describe CommitsHelper do
end
describe "#conditionally_paginate_diff_files" do
- let(:diffs_collection) { instance_double(Gitlab::Diff::FileCollection::Commit, diff_files: diff_files) }
- let(:diff_files) { Gitlab::Git::DiffCollection.new(files) }
- let(:page) { nil }
+ let_it_be(:project) { create(:project, :repository) }
+
+ let(:diffs_collection) { instance_double(Gitlab::Diff::FileCollection::Commit, diff_files: decorated_diff_files, project: project) }
+ let(:decorated_diff_files) do
+ diffs.map do |diff|
+ Gitlab::Diff::File.new(diff, repository: project.repository)
+ end
+ end
+ let(:diffs) { Gitlab::Git::DiffCollection.new(files) }
let(:files) do
Array.new(85).map do
{ too_large: false, diff: "" }
end
end
+ let(:page) { nil }
+
subject { helper.conditionally_paginate_diff_files(diffs_collection, paginate: paginate, page: page, per: Projects::CommitController::COMMIT_DIFFS_PER_PAGE) }
before do
@@ -203,8 +211,8 @@ RSpec.describe CommitsHelper do
context "pagination is disabled" do
let(:paginate) { false }
- it "returns a standard DiffCollection" do
- expect(subject).to be_a(Gitlab::Git::DiffCollection)
+ it "returns the unpaginated collection" do
+ expect(subject.size).to eq(85)
end
end
end
diff --git a/spec/helpers/form_helper_spec.rb b/spec/helpers/form_helper_spec.rb
index c9c8c6b13b6..4b76c370810 100644
--- a/spec/helpers/form_helper_spec.rb
+++ b/spec/helpers/form_helper_spec.rb
@@ -3,6 +3,82 @@
require 'spec_helper'
RSpec.describe FormHelper do
+ include Devise::Test::ControllerHelpers
+
+ describe '#dropdown_max_select' do
+ context "with the :limit_reviewer_and_assignee_size feature flag on" do
+ it 'correctly returns the max amount of reviewers or assignees to allow' do
+ max = MergeRequest::MAX_NUMBER_OF_ASSIGNEES_OR_REVIEWERS
+
+ expect(helper.dropdown_max_select({}))
+ .to eq(max)
+ expect(helper.dropdown_max_select({ 'max-select'.to_sym => 5 }))
+ .to eq(5)
+ expect(helper.dropdown_max_select({ 'max-select'.to_sym => max + 5 }))
+ .to eq(max)
+ end
+ end
+
+ context "with the :limit_reviewer_and_assignee_size feature flag off" do
+ before do
+ stub_feature_flags(limit_reviewer_and_assignee_size: false)
+ end
+
+ it 'correctly returns the max amount of reviewers or assignees to allow' do
+ expect(helper.dropdown_max_select({}))
+ .to eq(nil)
+ expect(helper.dropdown_max_select({ 'max-select'.to_sym => 5 }))
+ .to eq(5)
+ expect(helper.dropdown_max_select({ 'max-select'.to_sym => 120 }))
+ .to eq(120)
+ end
+ end
+ end
+
+ describe '#reviewers_dropdown_options' do
+ let(:merge_request) { build(:merge_request) }
+
+ context "with the :limit_reviewer_and_assignee_size feature flag on" do
+ context "with multiple reviewers" do
+ it 'correctly returns the max amount of reviewers or assignees to allow' do
+ allow(helper).to receive(:merge_request_supports_multiple_reviewers?).and_return(true)
+
+ expect(helper.reviewers_dropdown_options(merge_request)[:data][:'max-select'])
+ .to eq(MergeRequest::MAX_NUMBER_OF_ASSIGNEES_OR_REVIEWERS)
+ end
+ end
+
+ context "with only 1 reviewer" do
+ it 'correctly returns the max amount of reviewers or assignees to allow' do
+ expect(helper.reviewers_dropdown_options(merge_request)[:data][:'max-select'])
+ .to eq(1)
+ end
+ end
+ end
+
+ context "with the :limit_reviewer_and_assignee_size feature flag off" do
+ before do
+ stub_feature_flags(limit_reviewer_and_assignee_size: false)
+ end
+
+ context "with multiple reviewers" do
+ it 'correctly returns the max amount of reviewers or assignees to allow' do
+ allow(helper).to receive(:merge_request_supports_multiple_reviewers?).and_return(true)
+
+ expect(helper.reviewers_dropdown_options(merge_request)[:data][:'max-select'])
+ .to eq(nil)
+ end
+ end
+
+ context "with only 1 reviewer" do
+ it 'correctly returns the max amount of reviewers or assignees to allow' do
+ expect(helper.reviewers_dropdown_options(merge_request)[:data][:'max-select'])
+ .to eq(1)
+ end
+ end
+ end
+ end
+
describe 'form_errors' do
it 'returns nil when model has no errors' do
model = double(errors: [])
@@ -13,10 +89,7 @@ RSpec.describe FormHelper do
it 'renders an appropriately styled alert div' do
model = double(errors: errors_stub('Error 1'))
- expect(helper.form_errors(model, pajamas_alert: false))
- .to include('<div class="alert alert-danger" id="error_explanation">')
-
- expect(helper.form_errors(model, pajamas_alert: true))
+ expect(helper.form_errors(model))
.to include(
'<div class="gl-alert gl-mb-5 gl-alert-danger gl-alert-not-dismissible" id="error_explanation" role="alert">'
)
diff --git a/spec/helpers/gitlab_script_tag_helper_spec.rb b/spec/helpers/gitlab_script_tag_helper_spec.rb
index 35f2c0795be..9d71e25286e 100644
--- a/spec/helpers/gitlab_script_tag_helper_spec.rb
+++ b/spec/helpers/gitlab_script_tag_helper_spec.rb
@@ -14,6 +14,16 @@ RSpec.describe GitlabScriptTagHelper do
expect(helper.javascript_include_tag(script_url).to_s)
.to eq "<script src=\"/javascripts/#{script_url}\" defer=\"defer\" nonce=\"noncevalue\"></script>"
end
+
+ it 'returns a script tag with defer=false and a nonce' do
+ expect(helper.javascript_include_tag(script_url, defer: nil).to_s)
+ .to eq "<script src=\"/javascripts/#{script_url}\" nonce=\"noncevalue\"></script>"
+ end
+
+  it 'returns a script tag with a nonce even when nonce is set to nil' do
+ expect(helper.javascript_include_tag(script_url, nonce: nil).to_s)
+ .to eq "<script src=\"/javascripts/#{script_url}\" defer=\"defer\" nonce=\"noncevalue\"></script>"
+ end
end
describe 'inline script tag' do
diff --git a/spec/helpers/groups/group_members_helper_spec.rb b/spec/helpers/groups/group_members_helper_spec.rb
index 89c26c21338..0d53225bbcf 100644
--- a/spec/helpers/groups/group_members_helper_spec.rb
+++ b/spec/helpers/groups/group_members_helper_spec.rb
@@ -173,7 +173,7 @@ RSpec.describe Groups::GroupMembersHelper do
describe '#group_member_header_subtext' do
it 'contains expected text with group name' do
- expect(helper.group_member_header_subtext(group)).to match("You can invite a new member to .*#{group.name}")
+ expect(helper.group_member_header_subtext(group)).to match("You're viewing members of .*#{group.name}")
end
end
end
diff --git a/spec/helpers/groups_helper_spec.rb b/spec/helpers/groups_helper_spec.rb
index d00cd8f1d6b..2c1061d2f1b 100644
--- a/spec/helpers/groups_helper_spec.rb
+++ b/spec/helpers/groups_helper_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe GroupsHelper do
include ApplicationHelper
+ include AvatarsHelper
describe '#group_icon_url' do
it 'returns an url for the avatar' do
@@ -135,6 +136,37 @@ RSpec.describe GroupsHelper do
end
end
+ describe '#group_title_link' do
+ let_it_be(:group) { create(:group, :with_avatar) }
+
+ let(:raw_link) { group_title_link(group, show_avatar: true) }
+ let(:document) { Nokogiri::HTML.parse(raw_link) }
+
+ describe 'link' do
+ subject(:link) { document.css('.group-path').first }
+
+ it 'uses the group name as innerText' do
+ expect(link.inner_text).to eq(group.name)
+ end
+
+ it 'links to the group path' do
+ expect(link.attr('href')).to eq(group_path(group))
+ end
+ end
+
+ describe 'icon' do
+ subject(:icon) { document.css('.avatar-tile').first }
+
+ it 'specifies the group name as the alt text' do
+ expect(icon.attr('alt')).to eq(group.name)
+ end
+
+ it 'uses the group\'s avatar_url' do
+ expect(icon.attr('src')).to eq(group.avatar_url)
+ end
+ end
+ end
+
describe '#share_with_group_lock_help_text' do
context 'traversal queries' do
let_it_be_with_reload(:root_group) { create(:group) }
@@ -420,9 +452,31 @@ RSpec.describe GroupsHelper do
end
end
- describe '#group_name_and_path_app_data' do
- let_it_be(:group) { build(:group, name: 'My awesome group', path: 'my-awesome-group') }
+ describe '#subgroup_creation_data' do
+ let_it_be(:name) { 'parent group' }
+ let_it_be(:group) { build(:group, name: name) }
let_it_be(:subgroup) { build(:group, parent: group) }
+
+ context 'when group has a parent' do
+ it 'returns expected hash' do
+ expect(subgroup_creation_data(subgroup)).to eq({
+ import_existing_group_path: '/groups/new#import-group-pane',
+ parent_group_name: name
+ })
+ end
+ end
+
+ context 'when group does not have a parent' do
+ it 'returns expected hash' do
+ expect(subgroup_creation_data(group)).to eq({
+ import_existing_group_path: '/groups/new#import-group-pane',
+ parent_group_name: nil
+ })
+ end
+ end
+ end
+
+ describe '#group_name_and_path_app_data' do
let_it_be(:root_url) { 'https://gitlab.com/' }
before do
@@ -432,17 +486,10 @@ RSpec.describe GroupsHelper do
context 'when group has a parent' do
it 'returns expected hash' do
- expect(group_name_and_path_app_data(subgroup)).to match(
- { base_path: 'https://gitlab.com/my-awesome-group', mattermost_enabled: 'true' }
- )
- end
- end
-
- context 'when group does not have a parent' do
- it 'returns expected hash' do
- expect(group_name_and_path_app_data(group)).to match(
- { base_path: root_url, mattermost_enabled: 'true' }
- )
+ expect(group_name_and_path_app_data).to match({
+ base_path: 'https://gitlab.com/',
+ mattermost_enabled: 'true'
+ })
end
end
end
@@ -461,7 +508,7 @@ RSpec.describe GroupsHelper do
it 'returns expected hash' do
expect(helper.subgroups_and_projects_list_app_data(group)).to match({
show_schema_markup: 'true',
- new_subgroup_path: including("groups/new?parent_id=#{group.id}"),
+ new_subgroup_path: including("groups/new?parent_id=#{group.id}#create-group-pane"),
new_project_path: including("/projects/new?namespace_id=#{group.id}"),
new_subgroup_illustration: including('illustrations/subgroup-create-new-sm'),
new_project_illustration: including('illustrations/project-create-new-sm'),
diff --git a/spec/helpers/issuables_description_templates_helper_spec.rb b/spec/helpers/issuables_description_templates_helper_spec.rb
index 768ce5975c1..bd8af384d40 100644
--- a/spec/helpers/issuables_description_templates_helper_spec.rb
+++ b/spec/helpers/issuables_description_templates_helper_spec.rb
@@ -3,9 +3,9 @@
require 'spec_helper'
RSpec.describe IssuablesDescriptionTemplatesHelper, :clean_gitlab_redis_cache do
- include_context 'project issuable templates context'
-
describe '#issuable_templates' do
+ include_context 'project issuable templates context'
+
let_it_be(:inherited_from) { nil }
let_it_be(:user) { create(:user) }
let_it_be(:parent_group, reload: true) { create(:group) }
@@ -44,7 +44,7 @@ RSpec.describe IssuablesDescriptionTemplatesHelper, :clean_gitlab_redis_cache do
end
end
- describe '#selected_template' do
+ describe '#available_service_desk_templates_for' do
let_it_be(:project) { build(:project) }
before do
@@ -72,46 +72,103 @@ RSpec.describe IssuablesDescriptionTemplatesHelper, :clean_gitlab_redis_cache do
].to_json
expect(helper.available_service_desk_templates_for(@project)).to eq(value)
end
+ end
- context 'when no issuable_template parameter or default template is present' do
- it 'does not select a template' do
- expect(helper.selected_template(project)).to be(nil)
- end
+ context 'when there are no templates in the project' do
+ let(:templates) { {} }
+
+ it 'returns empty array' do
+ value = [].to_json
+ expect(helper.available_service_desk_templates_for(@project)).to eq(value)
end
+ end
+ end
- context 'when an issuable_template parameter has been provided' do
- before do
- allow(helper).to receive(:params).and_return({ issuable_template: 'another_issue_template' })
- end
+ describe '#selected_template_name' do
+ let(:template_names) { %w(another_issue_template custom_issue_template) }
- it 'selects the issuable template' do
- expect(helper.selected_template(project)).to eq('another_issue_template')
- end
+ context 'when no issuable_template parameter is provided' do
+ it 'does not select a template' do
+ expect(helper.selected_template_name(template_names)).to be_nil
end
+ end
- context 'when there is a default template' do
- let(:templates) do
- {
- "" => [
- { name: "another_issue_template", id: "another_issue_template", project_id: project.id },
- { name: "default", id: "default", project_id: project.id }
- ]
- }
+ context 'when an issuable_template parameter has been provided' do
+ before do
+ allow(helper).to receive(:params).and_return({ issuable_template: template_param_value })
+ end
+
+ context 'when param matches existing templates' do
+ let(:template_param_value) { 'another_issue_template' }
+
+ it 'returns the matching issuable template' do
+ expect(helper.selected_template_name(template_names)).to eq('another_issue_template')
end
+ end
- it 'selects the default template' do
- expect(helper.selected_template(project)).to eq('default')
+ context 'when param does not match any templates' do
+ let(:template_param_value) { 'non_matching_issue_template' }
+
+ it 'returns nil' do
+ expect(helper.selected_template_name(template_names)).to be_nil
end
end
end
+ end
- context 'when there are not templates in the project' do
- let(:templates) { {} }
+ describe '#default_template_name' do
+ context 'when a default template is available' do
+ let(:template_names) { %w(another_issue_template deFault) }
- it 'returns empty array' do
- value = [].to_json
- expect(helper.available_service_desk_templates_for(@project)).to eq(value)
+ it 'returns the default template' do
+ issue = build(:issue)
+
+ expect(helper.default_template_name(template_names, issue)).to be('deFault')
+ end
+
+ it 'returns nil when issuable has a description set' do
+ issue = build(:issue, description: 'from template in project settings')
+
+ expect(helper.default_template_name(template_names, issue)).to be_nil
+ end
+
+ it 'returns nil when issuable is persisted' do
+ issue = create(:issue)
+
+ expect(helper.default_template_name(template_names, issue)).to be_nil
+ end
+ end
+
+ context 'when there is no default template' do
+ let(:template_names) { %w(another_issue_template) }
+
+ it 'returns nil' do
+ expect(helper.default_template_name(template_names, build(:issue))).to be_nil
end
end
end
+
+ describe '#template_names' do
+ let(:project) { build(:project) }
+ let(:templates) do
+ {
+ "Project templates" => [
+ { name: "another_issue_template", id: "another_issue_template", project_id: project.id },
+ { name: "custom_issue_template", id: "custom_issue_template", project_id: project.id }
+ ],
+ "Group templates" => [
+ { name: "another_issue_template", id: "another_issue_template", project_id: project.id }
+ ]
+ }
+ end
+
+ before do
+ allow(helper).to receive(:ref_project).and_return(project)
+ allow(helper).to receive(:issuable_templates).and_return(templates)
+ end
+
+ it 'returns unique list of template names' do
+ expect(helper.template_names(build(:issue))).to contain_exactly('another_issue_template', 'custom_issue_template')
+ end
+ end
end
diff --git a/spec/helpers/issuables_helper_spec.rb b/spec/helpers/issuables_helper_spec.rb
index 73527bea14e..069465c2fec 100644
--- a/spec/helpers/issuables_helper_spec.rb
+++ b/spec/helpers/issuables_helper_spec.rb
@@ -98,11 +98,55 @@ RSpec.describe IssuablesHelper do
end
end
- describe '#issuable_meta' do
+ describe '#issuable_meta', time_travel_to: '2022-08-05 00:00:00 +0000' do
let(:user) { create(:user) }
let_it_be(:project) { create(:project) }
+ describe 'Issuable created status text' do
+ subject { helper.issuable_meta(issuable, project) }
+
+ context 'when issuable is a work item and flag is off' do
+ using RSpec::Parameterized::TableSyntax
+
+ before do
+ stub_feature_flags(work_items: false)
+ end
+
+ where(:issuable_type, :text) do
+ :issue | 'Issue created Aug 05, 2022 by'
+ :incident | 'Incident created Aug 05, 2022 by'
+ end
+
+ let(:issuable) { build_stubbed(:work_item, issuable_type, created_at: Date.current) }
+
+ with_them do
+ it { is_expected.to have_content(text) }
+ end
+ end
+
+ context 'when issuable is a work item and flag is on' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:issuable_type, :text) do
+ :issue | 'Issue created Aug 05, 2022 by'
+ :incident | 'Incident created Aug 05, 2022 by'
+ end
+
+ let(:issuable) { build_stubbed(:work_item, issuable_type, created_at: Date.current) }
+
+ with_them do
+ it { is_expected.to have_content(text) }
+ end
+ end
+
+ context 'when issuable is not a work item' do
+ let(:issuable) { build_stubbed(:merge_request, created_at: Date.current) }
+
+ it { is_expected.to have_content('Created Aug 05, 2022') }
+ end
+ end
+
describe 'author status' do
let(:issuable) { build(:merge_request, source_project: project, author: user, created_at: '2020-01-30') }
@@ -299,7 +343,7 @@ RSpec.describe IssuablesHelper do
initialTitleText: issue.title,
initialDescriptionHtml: '<p dir="auto">issue text</p>',
initialDescriptionText: 'issue text',
- initialTaskStatus: '0 of 0 tasks completed',
+ initialTaskStatus: '0 of 0 checklist items completed',
issueType: 'issue',
iid: issue.iid.to_s,
isHidden: false
diff --git a/spec/helpers/members_helper_spec.rb b/spec/helpers/members_helper_spec.rb
index e94eb63fc2c..4a3a623ce77 100644
--- a/spec/helpers/members_helper_spec.rb
+++ b/spec/helpers/members_helper_spec.rb
@@ -74,4 +74,37 @@ RSpec.describe MembersHelper do
expect(localized_tasks_to_be_done_choices).to include(*MemberTask::TASKS.keys)
end
end
+
+ describe '#member_request_access_link' do
+ let(:project) { create(:project) }
+ let(:group) { create(:group) }
+ let(:project_member) { create(:project_member, :reporter, project: project) }
+ let(:group_member) { create(:group_member, :reporter, group: group) }
+
+ it 'returns request link for project members' do
+ user = project_member.user
+ source = project_member.source
+ link = member_request_access_link(project_member)
+
+ user_link = link_to user.name, user, class: :highlight
+ access_level = content_tag :span, project_member.human_access, class: :highlight
+ source_link = link_to source.human_name, polymorphic_url([project_member.source, :members]), class: :highlight
+ source_type = source.model_name.singular
+
+ expect(link).to eq "#{user_link} requested #{access_level} access to the #{source_link} #{source_type}."
+ end
+
+ it 'returns the request link for group members' do
+ user = group_member.user
+ source = group_member.source
+ link = member_request_access_link(group_member)
+
+ user_link = link_to user.name, user, class: :highlight
+ access_level = content_tag :span, group_member.human_access, class: :highlight
+ source_link = link_to source.human_name, polymorphic_url([group_member.source, :members]), class: :highlight
+ source_type = source.model_name.singular
+
+ expect(link).to eq "#{user_link} requested #{access_level} access to the #{source_link} #{source_type}."
+ end
+ end
end
diff --git a/spec/helpers/merge_requests_helper_spec.rb b/spec/helpers/merge_requests_helper_spec.rb
index 97ad55d9df9..fb23b5c1dc8 100644
--- a/spec/helpers/merge_requests_helper_spec.rb
+++ b/spec/helpers/merge_requests_helper_spec.rb
@@ -27,6 +27,38 @@ RSpec.describe MergeRequestsHelper do
end
end
+ describe '#merge_path_description' do
+ let(:project) { create(:project) }
+ let(:forked_project) { fork_project(project) }
+ let(:merge_request_forked) { create(:merge_request, source_project: forked_project, target_project: project) }
+ let(:merge_request) { create(:merge_request, source_project: project, target_project: project) }
+
+ where(:case_name, :mr, :with_arrow, :result) do
+ [
+ ['forked with arrow', ref(:merge_request_forked), true, lazy do
+ "Project:Branches: #{
+ mr.source_project_path}:#{mr.source_branch} → #{
+ mr.target_project.full_path}:#{mr.target_branch}"
+ end],
+ ['forked default', ref(:merge_request_forked), false, lazy do
+ "Project:Branches: #{
+ mr.source_project_path}:#{mr.source_branch} to #{
+ mr.target_project.full_path}:#{mr.target_branch}"
+ end],
+ ['with arrow', ref(:merge_request), true, lazy { "Branches: #{mr.source_branch} → #{mr.target_branch}" }],
+ ['default', ref(:merge_request), false, lazy { "Branches: #{mr.source_branch} to #{mr.target_branch}" }]
+ ]
+ end
+
+ with_them do
+ subject { merge_path_description(mr, with_arrow: with_arrow) }
+
+ it {
+ is_expected.to eq(result)
+ }
+ end
+ end
+
describe '#tab_link_for' do
let(:merge_request) { create(:merge_request, :simple) }
let(:options) { {} }
@@ -46,8 +78,7 @@ RSpec.describe MergeRequestsHelper do
let(:user) do
double(
assigned_open_merge_requests_count: 1,
- review_requested_open_merge_requests_count: 2,
- attention_requested_open_merge_requests_count: 3
+ review_requested_open_merge_requests_count: 2
)
end
@@ -57,33 +88,12 @@ RSpec.describe MergeRequestsHelper do
allow(helper).to receive(:current_user).and_return(user)
end
- describe 'mr_attention_requests disabled' do
- before do
- allow(user).to receive(:mr_attention_requests_enabled?).and_return(false)
- end
-
- it "returns assigned, review requested and total merge request counts" do
- expect(subject).to eq(
- assigned: user.assigned_open_merge_requests_count,
- review_requested: user.review_requested_open_merge_requests_count,
- total: user.assigned_open_merge_requests_count + user.review_requested_open_merge_requests_count
- )
- end
- end
-
- describe 'mr_attention_requests enabled' do
- before do
- allow(user).to receive(:mr_attention_requests_enabled?).and_return(true)
- end
-
- it "returns assigned, review requested, attention requests and total merge request counts" do
- expect(subject).to eq(
- assigned: user.assigned_open_merge_requests_count,
- review_requested: user.review_requested_open_merge_requests_count,
- attention_requested_count: user.attention_requested_open_merge_requests_count,
- total: user.attention_requested_open_merge_requests_count
- )
- end
+ it "returns assigned, review requested and total merge request counts" do
+ expect(subject).to eq(
+ assigned: user.assigned_open_merge_requests_count,
+ review_requested: user.review_requested_open_merge_requests_count,
+ total: user.assigned_open_merge_requests_count + user.review_requested_open_merge_requests_count
+ )
end
end
@@ -134,6 +144,7 @@ RSpec.describe MergeRequestsHelper do
it 'returns reviewer label with no names' do
expect(helper.reviewers_label(merge_request)).to eq("Reviewers: ")
end
+
it 'returns reviewer label only with include_value: false' do
expect(helper.reviewers_label(merge_request, include_value: false)).to eq("Reviewers")
end
diff --git a/spec/helpers/namespaces_helper_spec.rb b/spec/helpers/namespaces_helper_spec.rb
index 39f0e1c15f5..f7500709d0e 100644
--- a/spec/helpers/namespaces_helper_spec.rb
+++ b/spec/helpers/namespaces_helper_spec.rb
@@ -45,39 +45,6 @@ RSpec.describe NamespacesHelper do
user_group.add_owner(user)
end
- describe '#namespaces_as_json' do
- let(:result) { helper.namespaces_as_json(user) }
-
- before do
- allow(helper).to receive(:current_user).and_return(user)
- end
-
- it 'returns the user\'s groups' do
- json_data = Gitlab::Json.parse(result)
-
- expect(result).to include('group')
- expect(json_data['group']).to include(
- "id" => user_group.id,
- "name" => user_group.name,
- "display_path" => user_group.full_path,
- "human_name" => user_group.human_name
- )
- end
-
- it 'returns the user\'s namespace' do
- user_namespace = user.namespace
- json_data = Gitlab::Json.parse(result)
-
- expect(result).to include('user')
- expect(json_data['user']).to include(
- "id" => user_namespace.id,
- "name" => user_namespace.name,
- "display_path" => user_namespace.full_path,
- "human_name" => user_namespace.human_name
- )
- end
- end
-
describe '#namespaces_options' do
context 'when admin mode is enabled', :enable_admin_mode do
it 'returns groups without being a member for admin' do
diff --git a/spec/helpers/nav/new_dropdown_helper_spec.rb b/spec/helpers/nav/new_dropdown_helper_spec.rb
index 2fe237fb996..45664a7e0bd 100644
--- a/spec/helpers/nav/new_dropdown_helper_spec.rb
+++ b/spec/helpers/nav/new_dropdown_helper_spec.rb
@@ -173,7 +173,7 @@ RSpec.describe Nav::NewDropdownHelper do
menu_item: ::Gitlab::Nav::TopNavMenuItem.build(
id: 'new_subgroup',
title: 'New subgroup',
- href: "/groups/new?parent_id=#{group.id}",
+ href: "/groups/new?parent_id=#{group.id}#create-group-pane",
data: { track_action: 'click_link_new_subgroup', track_label: 'plus_menu_dropdown' }
)
)
diff --git a/spec/helpers/nav/top_nav_helper_spec.rb b/spec/helpers/nav/top_nav_helper_spec.rb
index 9d43e057521..e4fa503b5ee 100644
--- a/spec/helpers/nav/top_nav_helper_spec.rb
+++ b/spec/helpers/nav/top_nav_helper_spec.rb
@@ -88,18 +88,6 @@ RSpec.describe Nav::TopNavHelper do
expect(subject[:shortcuts]).to eq(expected_shortcuts)
end
- it 'has expected :secondary' do
- expected_secondary = [
- ::Gitlab::Nav::TopNavMenuItem.build(
- href: '/help',
- id: 'help',
- title: 'Help',
- icon: 'question-o'
- )
- ]
- expect(subject[:secondary]).to eq(expected_secondary)
- end
-
context 'with current nav as project' do
before do
helper.nav('project')
diff --git a/spec/helpers/profiles_helper_spec.rb b/spec/helpers/profiles_helper_spec.rb
index 399726263db..63641e65942 100644
--- a/spec/helpers/profiles_helper_spec.rb
+++ b/spec/helpers/profiles_helper_spec.rb
@@ -67,38 +67,6 @@ RSpec.describe ProfilesHelper do
end
end
- describe "#user_status_set_to_busy?" do
- using RSpec::Parameterized::TableSyntax
-
- where(:availability, :result) do
- "busy" | true
- "not_set" | false
- "" | false
- nil | false
- end
-
- with_them do
- it { expect(helper.user_status_set_to_busy?(OpenStruct.new(availability: availability))).to eq(result) }
- end
- end
-
- describe "#show_status_emoji?" do
- using RSpec::Parameterized::TableSyntax
-
- where(:message, :emoji, :result) do
- "Some message" | UserStatus::DEFAULT_EMOJI | true
- "Some message" | "" | true
- "" | "basketball" | true
- "" | "basketball" | true
- "" | UserStatus::DEFAULT_EMOJI | false
- "" | UserStatus::DEFAULT_EMOJI | false
- end
-
- with_them do
- it { expect(helper.show_status_emoji?(OpenStruct.new(message: message, emoji: emoji))).to eq(result) }
- end
- end
-
describe "#ssh_key_expiration_tooltip" do
using RSpec::Parameterized::TableSyntax
diff --git a/spec/helpers/projects/pipeline_helper_spec.rb b/spec/helpers/projects/pipeline_helper_spec.rb
index 2b2dad286c7..8ce4e9f5293 100644
--- a/spec/helpers/projects/pipeline_helper_spec.rb
+++ b/spec/helpers/projects/pipeline_helper_spec.rb
@@ -27,7 +27,13 @@ RSpec.describe Projects::PipelineHelper do
metrics_path: namespace_project_ci_prometheus_metrics_histograms_path(namespace_id: project.namespace, project_id: project, format: :json),
pipeline_iid: pipeline.iid,
pipeline_project_path: project.full_path,
- total_job_count: pipeline.total_size
+ total_job_count: pipeline.total_size,
+ summary_endpoint: summary_project_pipeline_tests_path(project, pipeline, format: :json),
+ suite_endpoint: project_pipeline_test_path(project, pipeline, suite_name: 'suite', format: :json),
+ blob_path: project_blob_path(project, pipeline.sha),
+ has_test_report: pipeline.has_reports?(Ci::JobArtifact.test_reports),
+ empty_state_image_path: match_asset_path('illustrations/empty-state/empty-test-cases-lg.svg'),
+ artifacts_expired_image_path: match_asset_path('illustrations/pipeline.svg')
})
end
end
diff --git a/spec/helpers/projects_helper_spec.rb b/spec/helpers/projects_helper_spec.rb
index b7cc8c217a4..04c066986b7 100644
--- a/spec/helpers/projects_helper_spec.rb
+++ b/spec/helpers/projects_helper_spec.rb
@@ -966,7 +966,10 @@ RSpec.describe ProjectsHelper do
operationsAccessLevel: project.project_feature.operations_access_level,
showDefaultAwardEmojis: project.show_default_award_emojis?,
securityAndComplianceAccessLevel: project.security_and_compliance_access_level,
- containerRegistryAccessLevel: project.project_feature.container_registry_access_level
+ containerRegistryAccessLevel: project.project_feature.container_registry_access_level,
+ environmentsAccessLevel: project.project_feature.environments_access_level,
+ featureFlagsAccessLevel: project.project_feature.feature_flags_access_level,
+ releasesAccessLevel: project.project_feature.releases_access_level
)
end
@@ -1313,4 +1316,38 @@ RSpec.describe ProjectsHelper do
end
end
end
+
+ describe '#project_coverage_chart_data_attributes' do
+ let(:ref) { 'ref' }
+ let(:daily_coverage_options) do
+ {
+ base_params: {
+ start_date: Date.current - 90.days,
+ end_date: Date.current,
+ ref_path: project.repository.expand_ref(ref),
+ param_type: 'coverage'
+ },
+ download_path: namespace_project_ci_daily_build_group_report_results_path(
+ namespace_id: project.namespace,
+ project_id: project,
+ format: :csv
+ ),
+ graph_api_path: namespace_project_ci_daily_build_group_report_results_path(
+ namespace_id: project.namespace,
+ project_id: project,
+ format: :json
+ )
+ }
+ end
+
+ it 'returns project data to render coverage chart' do
+ expect(helper.project_coverage_chart_data_attributes(daily_coverage_options, ref)).to include(
+ graph_endpoint: start_with(daily_coverage_options.fetch(:graph_api_path)),
+ graph_start_date: daily_coverage_options.dig(:base_params, :start_date).strftime('%b %d'),
+ graph_end_date: daily_coverage_options.dig(:base_params, :end_date).strftime('%b %d'),
+ graph_ref: ref,
+ graph_csv_path: start_with(daily_coverage_options.fetch(:download_path))
+ )
+ end
+ end
end
diff --git a/spec/helpers/search_helper_spec.rb b/spec/helpers/search_helper_spec.rb
index 1ead1fc9b8b..513e2865ee3 100644
--- a/spec/helpers/search_helper_spec.rb
+++ b/spec/helpers/search_helper_spec.rb
@@ -74,19 +74,21 @@ RSpec.describe SearchHelper do
expect(result.keys).to match_array(%i[category id value label url avatar_url])
end
- it 'includes the users recently viewed issues', :aggregate_failures do
+ it 'includes the users recently viewed issues and project with correct order', :aggregate_failures do
recent_issues = instance_double(::Gitlab::Search::RecentIssues)
expect(::Gitlab::Search::RecentIssues).to receive(:new).with(user: user).and_return(recent_issues)
project1 = create(:project, :with_avatar, namespace: user.namespace)
project2 = create(:project, namespace: user.namespace)
issue1 = create(:issue, title: 'issue 1', project: project1)
issue2 = create(:issue, title: 'issue 2', project: project2)
+ project = create(:project, title: 'the search term')
+ project.add_developer(user)
expect(recent_issues).to receive(:search).with('the search term').and_return(Issue.id_in_ordered([issue1.id, issue2.id]))
results = search_autocomplete_opts("the search term")
- expect(results.count).to eq(2)
+ expect(results.count).to eq(3)
expect(results[0]).to include({
category: 'Recent issues',
@@ -103,6 +105,13 @@ RSpec.describe SearchHelper do
url: Gitlab::Routing.url_helpers.project_issue_path(issue2.project, issue2),
avatar_url: '' # This project didn't have an avatar so set this to ''
})
+
+ expect(results[2]).to include({
+ category: 'Projects',
+ id: project.id,
+ label: project.full_name,
+ url: Gitlab::Routing.url_helpers.project_path(project)
+ })
end
it 'includes the users recently viewed issues with the exact same name', :aggregate_failures do
diff --git a/spec/helpers/storage_helper_spec.rb b/spec/helpers/storage_helper_spec.rb
index 4b46bf169e0..6c3556c874b 100644
--- a/spec/helpers/storage_helper_spec.rb
+++ b/spec/helpers/storage_helper_spec.rb
@@ -57,8 +57,8 @@ RSpec.describe StorageHelper do
let_it_be(:paid_group) { create(:group) }
before do
- allow(helper).to receive(:can?).with(current_user, :maintain_namespace, free_group).and_return(true)
- allow(helper).to receive(:can?).with(current_user, :maintain_namespace, paid_group).and_return(true)
+ allow(helper).to receive(:can?).with(current_user, :maintainer_access, free_group).and_return(true)
+ allow(helper).to receive(:can?).with(current_user, :maintainer_access, paid_group).and_return(true)
allow(helper).to receive(:current_user) { current_user }
allow(paid_group).to receive(:paid?).and_return(true)
@@ -84,7 +84,7 @@ RSpec.describe StorageHelper do
end
it 'returns nil when current_user do not have access usage quotas page' do
- allow(helper).to receive(:can?).with(current_user, :maintain_namespace, free_group).and_return(false)
+ allow(helper).to receive(:can?).with(current_user, :maintainer_access, free_group).and_return(false)
expect(helper.storage_enforcement_banner_info(free_group)).to be(nil)
end
@@ -97,12 +97,16 @@ RSpec.describe StorageHelper do
context 'when current_user can access the usage quotas page' do
it 'returns a hash' do
+ used_storage = helper.storage_counter(free_group.root_storage_statistics&.storage_size || 0)
+
expect(helper.storage_enforcement_banner_info(free_group)).to eql({
- text: "From #{storage_enforcement_date} storage limits will apply to this namespace. You are currently using 0 Bytes of namespace storage. View and manage your usage from <strong>Group settings &gt; Usage quotas</strong>.",
+ text_paragraph_1: "Effective #{storage_enforcement_date}, namespace storage limits will apply to the <strong>#{free_group.name}</strong> namespace. View the <a href=\"/help/user/usage_quotas#namespace-storage-limit-enforcement-schedule\" >rollout schedule for this change</a>.",
+ text_paragraph_2: "The namespace is currently using <strong>#{used_storage}</strong> of namespace storage. Group owners can view namespace storage usage and purchase more from <strong>Group settings &gt; Usage quotas</strong>. <a href=\"/help/user/usage_quotas#manage-your-storage-usage\" >Learn more.</a>",
+ text_paragraph_3: "See our <a href=\"https://about.gitlab.com/pricing/faq-efficient-free-tier/#storage-limits-on-gitlab-saas-free-tier\" >FAQ</a> for more information.",
variant: 'warning',
+ namespace_id: free_group.id,
callouts_feature_name: 'storage_enforcement_banner_second_enforcement_threshold',
- callouts_path: '/-/users/group_callouts',
- learn_more_link: '<a rel="noopener noreferrer" target="_blank" href="/help//">Learn more.</a>'
+ callouts_path: '/-/users/group_callouts'
})
end
@@ -112,7 +116,7 @@ RSpec.describe StorageHelper do
end
it 'returns a hash with the correct storage size text' do
- expect(helper.storage_enforcement_banner_info(free_group)[:text]).to eql("From #{storage_enforcement_date} storage limits will apply to this namespace. You are currently using 100 KB of namespace storage. View and manage your usage from <strong>Group settings &gt; Usage quotas</strong>.")
+ expect(helper.storage_enforcement_banner_info(free_group)[:text_paragraph_2]).to eql("The namespace is currently using <strong>100 KB</strong> of namespace storage. Group owners can view namespace storage usage and purchase more from <strong>Group settings &gt; Usage quotas</strong>. <a href=\"/help/user/usage_quotas#manage-your-storage-usage\" >Learn more.</a>")
end
end
@@ -120,11 +124,12 @@ RSpec.describe StorageHelper do
let_it_be(:sub_group) { build(:group) }
before do
+ allow(helper).to receive(:can?).with(current_user, :maintainer_access, sub_group).and_return(true)
allow(sub_group).to receive(:root_ancestor).and_return(free_group)
end
it 'returns the banner hash' do
- expect(helper.storage_enforcement_banner_info(sub_group).keys).to match_array(%i(text variant callouts_feature_name callouts_path learn_more_link))
+ expect(helper.storage_enforcement_banner_info(sub_group).keys).to match_array(%i(text_paragraph_1 text_paragraph_2 text_paragraph_3 variant namespace_id callouts_feature_name callouts_path))
end
end
end
@@ -136,7 +141,8 @@ RSpec.describe StorageHelper do
end
it 'returns the enforcement info' do
- expect(helper.storage_enforcement_banner_info(free_group)[:text]).to include("From #{Date.current} storage limits will apply to this namespace.")
+ puts helper.storage_enforcement_banner_info(free_group)[:text_paragraph_1]
+ expect(helper.storage_enforcement_banner_info(free_group)[:text_paragraph_1]).to include("Effective #{Date.current}, namespace storage limits will apply")
end
end
diff --git a/spec/helpers/users_helper_spec.rb b/spec/helpers/users_helper_spec.rb
index 88030299574..78a15f52be5 100644
--- a/spec/helpers/users_helper_spec.rb
+++ b/spec/helpers/users_helper_spec.rb
@@ -421,6 +421,25 @@ RSpec.describe UsersHelper do
end
end
+ describe '#user_email_help_text' do
+ subject(:user_email_help_text) { helper.user_email_help_text(user) }
+
+ context 'when `user.unconfirmed_email` is not set' do
+ it 'contains avatar detection text' do
+ expect(user_email_help_text).to include _('We also use email for avatar detection if no avatar is uploaded.')
+ end
+ end
+
+ context 'when `user.unconfirmed_email` is set' do
+ let(:user) { create(:user, :unconfirmed, unconfirmed_email: 'foo@bar.com') }
+
+ it 'contains resend confirmation e-mail text' do
+ expect(user_email_help_text).to include _('Resend confirmation e-mail')
+ expect(user_email_help_text).to include _('Please click the link in the confirmation email before continuing. It was sent to ')
+ end
+ end
+ end
+
describe '#admin_user_actions_data_attributes' do
subject(:data) { helper.admin_user_actions_data_attributes(user) }
diff --git a/spec/initializers/00_deprecations_spec.rb b/spec/initializers/00_deprecations_spec.rb
new file mode 100644
index 00000000000..e52e64415af
--- /dev/null
+++ b/spec/initializers/00_deprecations_spec.rb
@@ -0,0 +1,20 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe '00_deprecations' do
+ where(:warning) do
+ [
+ "ActiveModel::Errors#keys is deprecated and will be removed in Rails 6.2",
+ "Rendering actions with '.' in the name is deprecated:",
+ "default_hash is deprecated and will be removed from Rails 6.2"
+ ]
+ end
+
+ with_them do
+ specify do
+ expect { ActiveSupport::Deprecation.warn(warning) }
+ .to raise_error(ActiveSupport::DeprecationException)
+ end
+ end
+end
diff --git a/spec/initializers/0_log_deprecations_spec.rb b/spec/initializers/0_log_deprecations_spec.rb
index f5065126eaf..d34be32f7d0 100644
--- a/spec/initializers/0_log_deprecations_spec.rb
+++ b/spec/initializers/0_log_deprecations_spec.rb
@@ -11,6 +11,15 @@ RSpec.describe '0_log_deprecations' do
load Rails.root.join('config/initializers/0_log_deprecations.rb')
end
+ def with_deprecation_behavior
+ behavior = ActiveSupport::Deprecation.behavior
+ ActiveSupport::Deprecation.behavior = deprecation_behavior
+ yield
+ ensure
+ ActiveSupport::Deprecation.behavior = behavior
+ end
+
+ let(:deprecation_behavior) { :stderr }
let(:env_var) { '1' }
before do
@@ -24,19 +33,39 @@ RSpec.describe '0_log_deprecations' do
end
around do |example|
- # reset state changed by initializer
- Warning.clear(&example)
+ with_deprecation_behavior do
+ # reset state changed by initializer
+ Warning.clear(&example)
+ end
end
describe 'Ruby deprecations' do
- context 'when catching deprecations through Kernel#warn' do
- it 'also logs them to deprecation logger' do
+ shared_examples 'deprecation logger' do
+ it 'logs them to deprecation logger once and to stderr' do
expect(Gitlab::DeprecationJsonLogger).to receive(:info).with(
message: 'ABC gem is deprecated',
source: 'ruby'
)
- expect { warn('ABC gem is deprecated') }.to output.to_stderr
+ expect { subject }.to output.to_stderr
+ end
+ end
+
+ context 'when catching deprecations through Kernel#warn' do
+ subject { warn('ABC gem is deprecated') }
+
+ include_examples 'deprecation logger'
+
+ context 'with non-notify deprecation behavior' do
+ let(:deprecation_behavior) { :silence }
+
+ include_examples 'deprecation logger'
+ end
+
+ context 'with notify deprecation behavior' do
+ let(:deprecation_behavior) { :notify }
+
+ include_examples 'deprecation logger'
end
end
@@ -60,13 +89,40 @@ RSpec.describe '0_log_deprecations' do
end
describe 'Rails deprecations' do
- it 'logs them to deprecation logger' do
- expect(Gitlab::DeprecationJsonLogger).to receive(:info).with(
- message: match(/^DEPRECATION WARNING: ABC will be removed/),
- source: 'rails'
- )
+ subject { ActiveSupport::Deprecation.warn('ABC will be removed') }
+
+ shared_examples 'deprecation logger' do
+ it 'logs them to deprecation logger once' do
+ expect(Gitlab::DeprecationJsonLogger).to receive(:info).with(
+ message: match(/^DEPRECATION WARNING: ABC will be removed/),
+ source: 'rails'
+ )
+
+ subject
+ end
+ end
+
+ context 'with non-notify deprecation behavior' do
+ let(:deprecation_behavior) { :silence }
+
+ include_examples 'deprecation logger'
+ end
+
+ context 'with notify deprecation behavior' do
+ let(:deprecation_behavior) { :notify }
+
+ include_examples 'deprecation logger'
+ end
+
+ context 'when deprecations were silenced' do
+ around do |example|
+ silenced = ActiveSupport::Deprecation.silenced
+ ActiveSupport::Deprecation.silenced = true
+ example.run
+ ActiveSupport::Deprecation.silenced = silenced
+ end
- expect { ActiveSupport::Deprecation.warn('ABC will be removed') }.to output.to_stderr
+ include_examples 'deprecation logger'
end
context 'when disabled via environment' do
diff --git a/spec/initializers/diagnostic_reports_spec.rb b/spec/initializers/diagnostic_reports_spec.rb
new file mode 100644
index 00000000000..70574194916
--- /dev/null
+++ b/spec/initializers/diagnostic_reports_spec.rb
@@ -0,0 +1,65 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'diagnostic reports' do
+ subject(:load_initializer) do
+ load Rails.root.join('config/initializers/diagnostic_reports.rb')
+ end
+
+ shared_examples 'does not modify worker startup hooks' do
+ it do
+ expect(Gitlab::Cluster::LifecycleEvents).not_to receive(:on_worker_start)
+ expect(Gitlab::Memory::ReportsDaemon).not_to receive(:instance)
+
+ load_initializer
+ end
+ end
+
+ context 'when GITLAB_DIAGNOSTIC_REPORTS_ENABLED is set to true' do
+ before do
+ stub_env('GITLAB_DIAGNOSTIC_REPORTS_ENABLED', true)
+ end
+
+ context 'when run in application context' do
+ before do
+ allow(::Gitlab::Runtime).to receive(:application?).and_return(true)
+ end
+
+ it 'modifies worker startup hooks' do
+ report_daemon = instance_double(Gitlab::Memory::ReportsDaemon)
+
+ expect(Gitlab::Cluster::LifecycleEvents).to receive(:on_worker_start).and_call_original
+ expect(Gitlab::Memory::ReportsDaemon).to receive(:instance).and_return(report_daemon)
+ expect(report_daemon).to receive(:start)
+
+ load_initializer
+ end
+ end
+
+ context 'when run in non-application context, such as rails console or tests' do
+ before do
+ allow(::Gitlab::Runtime).to receive(:application?).and_return(false)
+ end
+
+ include_examples 'does not modify worker startup hooks'
+ end
+ end
+
+ context 'when GITLAB_DIAGNOSTIC_REPORTS_ENABLED is not set' do
+ before do
+ allow(::Gitlab::Runtime).to receive(:application?).and_return(true)
+ end
+
+ include_examples 'does not modify worker startup hooks'
+ end
+
+ context 'when GITLAB_DIAGNOSTIC_REPORTS_ENABLED is set to false' do
+ before do
+ stub_env('GITLAB_DIAGNOSTIC_REPORTS_ENABLED', false)
+ allow(::Gitlab::Runtime).to receive(:application?).and_return(true)
+ end
+
+ include_examples 'does not modify worker startup hooks'
+ end
+end
diff --git a/spec/initializers/global_id_spec.rb b/spec/initializers/global_id_spec.rb
index 4deb1833999..edca4533b3a 100644
--- a/spec/initializers/global_id_spec.rb
+++ b/spec/initializers/global_id_spec.rb
@@ -9,7 +9,7 @@ RSpec.describe 'global_id' do
it 'patches GlobalID to find aliased models when a deprecation exists' do
allow(Gitlab::GlobalId::Deprecations).to receive(:deprecation_for).and_call_original
- allow(Gitlab::GlobalId::Deprecations).to receive(:deprecation_for).with('Issue').and_return(double(new_model_name: 'Project'))
+ allow(Gitlab::GlobalId::Deprecations).to receive(:deprecation_for).with('Issue').and_return(double(new_name: 'Project'))
project = create(:project)
gid_string = Gitlab::GlobalId.build(model_name: Issue.name, id: project.id).to_s
diff --git a/spec/initializers/memory_watchdog_spec.rb b/spec/initializers/memory_watchdog_spec.rb
new file mode 100644
index 00000000000..56f995b5cd3
--- /dev/null
+++ b/spec/initializers/memory_watchdog_spec.rb
@@ -0,0 +1,116 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+
+RSpec.describe 'memory watchdog' do
+ subject(:run_initializer) do
+ load Rails.root.join('config/initializers/memory_watchdog.rb')
+ end
+
+ context 'when GITLAB_MEMORY_WATCHDOG_ENABLED is truthy' do
+ let(:env_switch) { 'true' }
+
+ before do
+ stub_env('GITLAB_MEMORY_WATCHDOG_ENABLED', env_switch)
+ end
+
+ context 'when runtime is an application' do
+ let(:watchdog) { instance_double(Gitlab::Memory::Watchdog) }
+ let(:background_task) { instance_double(Gitlab::BackgroundTask) }
+
+ before do
+ allow(Gitlab::Runtime).to receive(:application?).and_return(true)
+ end
+
+ it 'registers a life-cycle hook' do
+ expect(Gitlab::Cluster::LifecycleEvents).to receive(:on_worker_start)
+
+ run_initializer
+ end
+
+ shared_examples 'starts watchdog with handler' do |handler_class|
+ it "uses the #{handler_class} and starts the watchdog" do
+ expect(Gitlab::Memory::Watchdog).to receive(:new).with(
+ handler: an_instance_of(handler_class),
+ logger: Gitlab::AppLogger).and_return(watchdog)
+ expect(Gitlab::BackgroundTask).to receive(:new).with(watchdog).and_return(background_task)
+ expect(background_task).to receive(:start)
+ expect(Gitlab::Cluster::LifecycleEvents).to receive(:on_worker_start).and_yield
+
+ run_initializer
+ end
+ end
+
+ # In tests, the Puma constant does not exist so we cannot use a verified double.
+ # rubocop: disable RSpec/VerifiedDoubles
+ context 'when puma' do
+ let(:puma) do
+ Class.new do
+ def self.cli_config
+ Struct.new(:options).new
+ end
+ end
+ end
+
+ before do
+ stub_const('Puma', puma)
+ stub_const('Puma::Cluster::WorkerHandle', double.as_null_object)
+
+ allow(Gitlab::Runtime).to receive(:puma?).and_return(true)
+ end
+
+ it_behaves_like 'starts watchdog with handler', Gitlab::Memory::Watchdog::PumaHandler
+ end
+ # rubocop: enable RSpec/VerifiedDoubles
+
+ context 'when sidekiq' do
+ before do
+ allow(Gitlab::Runtime).to receive(:sidekiq?).and_return(true)
+ end
+
+ it_behaves_like 'starts watchdog with handler', Gitlab::Memory::Watchdog::TermProcessHandler
+ end
+
+ context 'when other runtime' do
+ it_behaves_like 'starts watchdog with handler', Gitlab::Memory::Watchdog::NullHandler
+ end
+ end
+
+ context 'when runtime is unsupported' do
+ it 'does not register life-cycle hook' do
+ expect(Gitlab::Cluster::LifecycleEvents).not_to receive(:on_worker_start)
+
+ run_initializer
+ end
+ end
+ end
+
+ context 'when GITLAB_MEMORY_WATCHDOG_ENABLED is false' do
+ let(:env_switch) { 'false' }
+
+ before do
+ stub_env('GITLAB_MEMORY_WATCHDOG_ENABLED', env_switch)
+ # To rule out we return early due to this being false.
+ allow(Gitlab::Runtime).to receive(:application?).and_return(true)
+ end
+
+ it 'does not register life-cycle hook' do
+ expect(Gitlab::Cluster::LifecycleEvents).not_to receive(:on_worker_start)
+
+ run_initializer
+ end
+ end
+
+ context 'when GITLAB_MEMORY_WATCHDOG_ENABLED is not set' do
+ before do
+ # To rule out we return early due to this being false.
+ allow(Gitlab::Runtime).to receive(:application?).and_return(true)
+ end
+
+ it 'does not register life-cycle hook' do
+ expect(Gitlab::Cluster::LifecycleEvents).not_to receive(:on_worker_start)
+
+ run_initializer
+ end
+ end
+end
diff --git a/spec/lib/api/ci/helpers/runner_helpers_spec.rb b/spec/lib/api/ci/helpers/runner_helpers_spec.rb
index c6cdc1732f5..b254c419cbc 100644
--- a/spec/lib/api/ci/helpers/runner_helpers_spec.rb
+++ b/spec/lib/api/ci/helpers/runner_helpers_spec.rb
@@ -71,8 +71,8 @@ RSpec.describe API::Ci::Helpers::Runner do
end
end
- describe '#log_artifact_size' do
- subject { runner_helper.log_artifact_size(artifact) }
+ describe '#log_artifacts_filesize' do
+ subject { runner_helper.log_artifacts_filesize(artifact) }
let(:runner_params) { {} }
let(:artifact) { create(:ci_job_artifact, size: 42) }
diff --git a/spec/lib/api/entities/bulk_imports/entity_spec.rb b/spec/lib/api/entities/bulk_imports/entity_spec.rb
index f91ae1fc5a1..4de85862ab9 100644
--- a/spec/lib/api/entities/bulk_imports/entity_spec.rb
+++ b/spec/lib/api/entities/bulk_imports/entity_spec.rb
@@ -14,6 +14,7 @@ RSpec.describe API::Entities::BulkImports::Entity do
:status,
:source_full_path,
:destination_name,
+ :destination_slug,
:destination_namespace,
:parent_id,
:namespace_id,
diff --git a/spec/lib/api/entities/ci/job_request/image_spec.rb b/spec/lib/api/entities/ci/job_request/image_spec.rb
index 3ab14ffc3ae..fca3b5d3fa9 100644
--- a/spec/lib/api/entities/ci/job_request/image_spec.rb
+++ b/spec/lib/api/entities/ci/job_request/image_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe API::Entities::Ci::JobRequest::Image do
- let(:ports) { [{ number: 80, protocol: 'http', name: 'name' }]}
+ let(:ports) { [{ number: 80, protocol: 'http', name: 'name' }] }
let(:image) { double(name: 'image_name', entrypoint: ['foo'], ports: ports, pull_policy: ['if-not-present']) }
let(:entity) { described_class.new(image) }
diff --git a/spec/lib/api/entities/ci/job_request/port_spec.rb b/spec/lib/api/entities/ci/job_request/port_spec.rb
index 8e0d2cabcfc..3f2ca3275c1 100644
--- a/spec/lib/api/entities/ci/job_request/port_spec.rb
+++ b/spec/lib/api/entities/ci/job_request/port_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe ::API::Entities::Ci::JobRequest::Port do
- let(:port) { double(number: 80, protocol: 'http', name: 'name')}
+ let(:port) { double(number: 80, protocol: 'http', name: 'name') }
let(:entity) { described_class.new(port) }
subject { entity.as_json }
diff --git a/spec/lib/api/entities/ci/job_request/service_spec.rb b/spec/lib/api/entities/ci/job_request/service_spec.rb
index 47c2c4e04c9..86f2120c321 100644
--- a/spec/lib/api/entities/ci/job_request/service_spec.rb
+++ b/spec/lib/api/entities/ci/job_request/service_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe API::Entities::Ci::JobRequest::Service do
- let(:ports) { [{ number: 80, protocol: 'http', name: 'name' }]}
+ let(:ports) { [{ number: 80, protocol: 'http', name: 'name' }] }
let(:service) do
instance_double(
::Gitlab::Ci::Build::Image,
diff --git a/spec/lib/api/entities/project_spec.rb b/spec/lib/api/entities/project_spec.rb
index 6b542278fa6..f4073683919 100644
--- a/spec/lib/api/entities/project_spec.rb
+++ b/spec/lib/api/entities/project_spec.rb
@@ -13,6 +13,19 @@ RSpec.describe ::API::Entities::Project do
subject(:json) { entity.as_json }
+ context 'without project feature' do
+ before do
+ project.project_feature.destroy!
+ project.reload
+ end
+
+ it 'returns a response' do
+ expect(json[:issues_access_level]).to be_nil
+ expect(json[:repository_access_level]).to be_nil
+ expect(json[:merge_requests_access_level]).to be_nil
+ end
+ end
+
describe '.service_desk_address' do
before do
allow(project).to receive(:service_desk_enabled?).and_return(true)
diff --git a/spec/lib/api/helpers/authentication_spec.rb b/spec/lib/api/helpers/authentication_spec.rb
index eea5c10d4f8..ac5886fdadd 100644
--- a/spec/lib/api/helpers/authentication_spec.rb
+++ b/spec/lib/api/helpers/authentication_spec.rb
@@ -34,7 +34,7 @@ RSpec.describe API::Helpers::Authentication do
class << cls
def helpers(*modules, &block)
modules.each { |m| include m }
- include Module.new.tap { |m| m.class_eval(&block) } if block_given?
+ include Module.new.tap { |m| m.class_eval(&block) } if block
end
end
diff --git a/spec/lib/api/helpers_spec.rb b/spec/lib/api/helpers_spec.rb
index 23c97e2c0a3..cd41d362d03 100644
--- a/spec/lib/api/helpers_spec.rb
+++ b/spec/lib/api/helpers_spec.rb
@@ -185,7 +185,7 @@ RSpec.describe API::Helpers do
context 'support for IDs and paths as argument' do
let_it_be(:project) { create(:project) }
- let(:user) { project.first_owner}
+ let(:user) { project.first_owner }
before do
allow(helper).to receive(:current_user).and_return(user)
diff --git a/spec/lib/api/support/git_access_actor_spec.rb b/spec/lib/api/support/git_access_actor_spec.rb
index a09cabf4cd7..e1c800d25a7 100644
--- a/spec/lib/api/support/git_access_actor_spec.rb
+++ b/spec/lib/api/support/git_access_actor_spec.rb
@@ -83,6 +83,36 @@ RSpec.describe API::Support::GitAccessActor do
end
end
+ describe '#deploy_key_or_user' do
+ it 'returns a deploy key when the params contains deploy key' do
+ key = create(:deploy_key)
+ params = { key_id: key.id }
+
+ expect(described_class.from_params(params).deploy_key_or_user).to eq(key)
+ end
+
+ it 'returns a user when the params contains personal key' do
+ key = create(:key)
+ params = { key_id: key.id }
+
+ expect(described_class.from_params(params).deploy_key_or_user).to eq(key.user)
+ end
+
+ it 'returns a user when the params contains user id' do
+ user = create(:user)
+ params = { user_id: user.id }
+
+ expect(described_class.from_params(params).deploy_key_or_user).to eq(user)
+ end
+
+ it 'returns a user when the params contains user name' do
+ user = create(:user)
+ params = { username: user.username }
+
+ expect(described_class.from_params(params).deploy_key_or_user).to eq(user)
+ end
+ end
+
describe '#username' do
context 'when initialized with a User' do
let(:user) { build(:user) }
diff --git a/spec/lib/backup/database_spec.rb b/spec/lib/backup/database_spec.rb
index 53db7f0f149..ed5b34b7f8c 100644
--- a/spec/lib/backup/database_spec.rb
+++ b/spec/lib/backup/database_spec.rb
@@ -19,7 +19,7 @@ RSpec.describe Backup::Database do
let(:data) { Rails.root.join("spec/fixtures/pages_empty.tar.gz").to_s }
let(:force) { true }
- subject { described_class.new(progress, force: force) }
+ subject { described_class.new(Gitlab::Database::MAIN_DATABASE_NAME.to_sym, progress, force: force) }
before do
allow(subject).to receive(:pg_restore_cmd).and_return(cmd)
@@ -68,7 +68,7 @@ RSpec.describe Backup::Database do
context 'when the restore command prints errors' do
let(:visible_error) { "This is a test error\n" }
- let(:noise) { "Table projects does not exist\nmust be owner of extension pg_trgm\nWARNING: no privileges could be revoked for public\n" }
+ let(:noise) { "must be owner of extension pg_trgm\nWARNING: no privileges could be revoked for public\n" }
let(:cmd) { %W[#{Gem.ruby} -e $stderr.write("#{noise}#{visible_error}")] }
it 'filters out noise from errors and has a post restore warning' do
@@ -105,5 +105,25 @@ RSpec.describe Backup::Database do
expect(output).to include(%("PGUSER"=>"#{config['username']}")) if config['username']
end
end
+
+ context 'when the source file is missing' do
+ let(:main_database) { described_class.new(Gitlab::Database::MAIN_DATABASE_NAME.to_sym, progress, force: force) }
+ let(:ci_database) { described_class.new(Gitlab::Database::CI_DATABASE_NAME.to_sym, progress, force: force) }
+ let(:missing_file) { Rails.root.join("spec/fixtures/missing_file.tar.gz").to_s }
+
+ it 'main database raises an error about missing source file' do
+ expect(Rake::Task['gitlab:db:drop_tables']).not_to receive(:invoke)
+
+ expect do
+ main_database.restore(missing_file)
+ end.to raise_error(Backup::Error, /Source database file does not exist/)
+ end
+
+ it 'ci database tolerates missing source file' do
+ expect(Rake::Task['gitlab:db:drop_tables']).not_to receive(:invoke)
+ skip_if_multiple_databases_not_setup
+ expect { ci_database.restore(missing_file) }.not_to raise_error
+ end
+ end
end
end
diff --git a/spec/lib/backup/gitaly_backup_spec.rb b/spec/lib/backup/gitaly_backup_spec.rb
index 3a9c4dfe3fb..d427e41026e 100644
--- a/spec/lib/backup/gitaly_backup_spec.rb
+++ b/spec/lib/backup/gitaly_backup_spec.rb
@@ -73,7 +73,7 @@ RSpec.describe Backup::GitalyBackup do
let(:max_parallelism) { 3 }
it 'passes parallel option through' do
- expect(Open3).to receive(:popen2).with(expected_env, anything, 'create', '-path', anything, '-parallel', '3', '-layout', 'pointer', '-id', backup_id).and_call_original
+ expect(Open3).to receive(:popen2).with(expected_env, anything, 'create', '-path', anything, '-layout', 'pointer', '-parallel', '3', '-id', backup_id).and_call_original
subject.start(:create, destination, backup_id: backup_id)
subject.finish!
@@ -84,7 +84,7 @@ RSpec.describe Backup::GitalyBackup do
let(:storage_parallelism) { 3 }
it 'passes parallel option through' do
- expect(Open3).to receive(:popen2).with(expected_env, anything, 'create', '-path', anything, '-parallel-storage', '3', '-layout', 'pointer', '-id', backup_id).and_call_original
+ expect(Open3).to receive(:popen2).with(expected_env, anything, 'create', '-path', anything, '-layout', 'pointer', '-parallel-storage', '3', '-id', backup_id).and_call_original
subject.start(:create, destination, backup_id: backup_id)
subject.finish!
@@ -103,36 +103,6 @@ RSpec.describe Backup::GitalyBackup do
expect { subject.start(:create, destination, backup_id: backup_id) }.to raise_error(::Backup::Error, 'gitaly-backup binary not found and gitaly_backup_path is not configured')
end
-
- context 'feature flag incremental_repository_backup disabled' do
- before do
- stub_feature_flags(incremental_repository_backup: false)
- end
-
- it 'creates repository bundles', :aggregate_failures do
- # Add data to the wiki, design repositories, and snippets, so they will be included in the dump.
- create(:wiki_page, container: project)
- create(:design, :with_file, issue: create(:issue, project: project))
- project_snippet = create(:project_snippet, :repository, project: project)
- personal_snippet = create(:personal_snippet, :repository, author: project.first_owner)
-
- expect(Open3).to receive(:popen2).with(expected_env, anything, 'create', '-path', anything).and_call_original
-
- subject.start(:create, destination, backup_id: backup_id)
- subject.enqueue(project, Gitlab::GlRepository::PROJECT)
- subject.enqueue(project, Gitlab::GlRepository::WIKI)
- subject.enqueue(project, Gitlab::GlRepository::DESIGN)
- subject.enqueue(personal_snippet, Gitlab::GlRepository::SNIPPET)
- subject.enqueue(project_snippet, Gitlab::GlRepository::SNIPPET)
- subject.finish!
-
- expect(File).to exist(File.join(destination, project.disk_path + '.bundle'))
- expect(File).to exist(File.join(destination, project.disk_path + '.wiki.bundle'))
- expect(File).to exist(File.join(destination, project.disk_path + '.design.bundle'))
- expect(File).to exist(File.join(destination, personal_snippet.disk_path + '.bundle'))
- expect(File).to exist(File.join(destination, project_snippet.disk_path + '.bundle'))
- end
- end
end
context 'hashed storage' do
@@ -208,7 +178,7 @@ RSpec.describe Backup::GitalyBackup do
let(:max_parallelism) { 3 }
it 'passes parallel option through' do
- expect(Open3).to receive(:popen2).with(expected_env, anything, 'restore', '-path', anything, '-parallel', '3', '-layout', 'pointer').and_call_original
+ expect(Open3).to receive(:popen2).with(expected_env, anything, 'restore', '-path', anything, '-layout', 'pointer', '-parallel', '3').and_call_original
subject.start(:restore, destination, backup_id: backup_id)
subject.finish!
@@ -219,45 +189,13 @@ RSpec.describe Backup::GitalyBackup do
let(:storage_parallelism) { 3 }
it 'passes parallel option through' do
- expect(Open3).to receive(:popen2).with(expected_env, anything, 'restore', '-path', anything, '-parallel-storage', '3', '-layout', 'pointer').and_call_original
+ expect(Open3).to receive(:popen2).with(expected_env, anything, 'restore', '-path', anything, '-layout', 'pointer', '-parallel-storage', '3').and_call_original
subject.start(:restore, destination, backup_id: backup_id)
subject.finish!
end
end
- context 'feature flag incremental_repository_backup disabled' do
- before do
- stub_feature_flags(incremental_repository_backup: false)
- end
-
- it 'restores from repository bundles', :aggregate_failures do
- copy_bundle_to_backup_path('project_repo.bundle', project.disk_path + '.bundle')
- copy_bundle_to_backup_path('wiki_repo.bundle', project.disk_path + '.wiki.bundle')
- copy_bundle_to_backup_path('design_repo.bundle', project.disk_path + '.design.bundle')
- copy_bundle_to_backup_path('personal_snippet_repo.bundle', personal_snippet.disk_path + '.bundle')
- copy_bundle_to_backup_path('project_snippet_repo.bundle', project_snippet.disk_path + '.bundle')
-
- expect(Open3).to receive(:popen2).with(expected_env, anything, 'restore', '-path', anything).and_call_original
-
- subject.start(:restore, destination, backup_id: backup_id)
- subject.enqueue(project, Gitlab::GlRepository::PROJECT)
- subject.enqueue(project, Gitlab::GlRepository::WIKI)
- subject.enqueue(project, Gitlab::GlRepository::DESIGN)
- subject.enqueue(personal_snippet, Gitlab::GlRepository::SNIPPET)
- subject.enqueue(project_snippet, Gitlab::GlRepository::SNIPPET)
- subject.finish!
-
- collect_commit_shas = -> (repo) { repo.commits('master', limit: 10).map(&:sha) }
-
- expect(collect_commit_shas.call(project.repository)).to match_array(['393a7d860a5a4c3cc736d7eb00604e3472bb95ec'])
- expect(collect_commit_shas.call(project.wiki.repository)).to match_array(['c74b9948d0088d703ee1fafeddd9ed9add2901ea'])
- expect(collect_commit_shas.call(project.design_repository)).to match_array(['c3cd4d7bd73a51a0f22045c3a4c871c435dc959d'])
- expect(collect_commit_shas.call(personal_snippet.repository)).to match_array(['3b3c067a3bc1d1b695b51e2be30c0f8cf698a06e'])
- expect(collect_commit_shas.call(project_snippet.repository)).to match_array(['6e44ba56a4748be361a841e759c20e421a1651a1'])
- end
- end
-
it 'raises when the exit code not zero' do
expect(subject).to receive(:bin_path).and_return(Gitlab::Utils.which('false'))
diff --git a/spec/lib/backup/manager_spec.rb b/spec/lib/backup/manager_spec.rb
index 519d414f643..f85b005f4d1 100644
--- a/spec/lib/backup/manager_spec.rb
+++ b/spec/lib/backup/manager_spec.rb
@@ -24,7 +24,17 @@ RSpec.describe Backup::Manager do
describe '#run_create_task' do
let(:enabled) { true }
let(:task) { instance_double(Backup::Task) }
- let(:definitions) { { 'my_task' => Backup::Manager::TaskDefinition.new(task: task, enabled: enabled, destination_path: 'my_task.tar.gz', human_name: 'my task') } }
+ let(:definitions) do
+ {
+ 'my_task' => Backup::Manager::TaskDefinition.new(
+ task: task,
+ enabled: enabled,
+ destination_path: 'my_task.tar.gz',
+ human_name: 'my task',
+ task_group: 'group1'
+ )
+ }
+ end
it 'calls the named task' do
expect(task).to receive(:dump)
@@ -53,6 +63,16 @@ RSpec.describe Backup::Manager do
subject.run_create_task('my_task')
end
end
+
+ describe 'task group skipped' do
+ it 'informs the user' do
+ stub_env('SKIP', 'group1')
+
+ expect(Gitlab::BackupLogger).to receive(:info).with(message: 'Dumping my task ... [SKIPPED]')
+
+ subject.run_create_task('my_task')
+ end
+ end
end
describe '#run_restore_task' do
@@ -164,7 +184,7 @@ RSpec.describe Backup::Manager do
before do
stub_env('INCREMENTAL', incremental_env)
- allow(ActiveRecord::Base.connection).to receive(:reconnect!)
+ allow(ApplicationRecord.connection).to receive(:reconnect!)
allow(Gitlab::BackupLogger).to receive(:info)
allow(Kernel).to receive(:system).and_return(true)
diff --git a/spec/lib/banzai/cross_project_reference_spec.rb b/spec/lib/banzai/cross_project_reference_spec.rb
index e703bbc4927..8748a910003 100644
--- a/spec/lib/banzai/cross_project_reference_spec.rb
+++ b/spec/lib/banzai/cross_project_reference_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
RSpec.describe Banzai::CrossProjectReference do
let(:including_class) { Class.new.include(described_class).new }
- let(:reference_cache) { Banzai::Filter::References::ReferenceCache.new(including_class, {}, {})}
+ let(:reference_cache) { Banzai::Filter::References::ReferenceCache.new(including_class, {}, {}) }
before do
allow(including_class).to receive(:context).and_return({})
diff --git a/spec/lib/banzai/filter/broadcast_message_placeholders_filter_spec.rb b/spec/lib/banzai/filter/broadcast_message_placeholders_filter_spec.rb
index a2d35eaa6b6..c581750d2a9 100644
--- a/spec/lib/banzai/filter/broadcast_message_placeholders_filter_spec.rb
+++ b/spec/lib/banzai/filter/broadcast_message_placeholders_filter_spec.rb
@@ -29,7 +29,7 @@ RSpec.describe Banzai::Filter::BroadcastMessagePlaceholdersFilter do
end
context 'works with empty text' do
- let(:text) {" "}
+ let(:text) { " " }
it { expect(subject).to eq(" ") }
end
@@ -42,13 +42,13 @@ RSpec.describe Banzai::Filter::BroadcastMessagePlaceholdersFilter do
context 'available placeholders' do
context 'replaces the email of the user' do
- let(:text) { "{{email}}"}
+ let(:text) { "{{email}}" }
it { expect(subject).to eq(user.email) }
end
context 'replaces the name of the user' do
- let(:text) { "{{name}}"}
+ let(:text) { "{{name}}" }
it { expect(subject).to eq(user.name) }
end
diff --git a/spec/lib/banzai/filter/commit_trailers_filter_spec.rb b/spec/lib/banzai/filter/commit_trailers_filter_spec.rb
index f7cb6b92b48..38f9bda57e6 100644
--- a/spec/lib/banzai/filter/commit_trailers_filter_spec.rb
+++ b/spec/lib/banzai/filter/commit_trailers_filter_spec.rb
@@ -10,7 +10,7 @@ RSpec.describe Banzai::Filter::CommitTrailersFilter do
let(:secondary_email) { create(:email, :confirmed) }
let(:user) { create(:user) }
- let(:trailer) { "#{FFaker::Lorem.word}-by:"}
+ let(:trailer) { "#{FFaker::Lorem.word}-by:" }
let(:commit_message) { trailer_line(trailer, user.name, user.email) }
let(:commit_message_html) { commit_html(commit_message) }
diff --git a/spec/lib/banzai/filter/task_list_filter_spec.rb b/spec/lib/banzai/filter/task_list_filter_spec.rb
index c89acd1a643..920904b0f29 100644
--- a/spec/lib/banzai/filter/task_list_filter_spec.rb
+++ b/spec/lib/banzai/filter/task_list_filter_spec.rb
@@ -10,4 +10,38 @@ RSpec.describe Banzai::Filter::TaskListFilter do
expect(doc.xpath('.//li//task-button').count).to eq(2)
end
+
+ describe 'inapplicable list items' do
+ shared_examples 'a valid inapplicable task list item' do |html|
+ it "behaves correctly for `#{html}`" do
+ doc = filter("<ul><li>#{html}</li></ul>")
+
+ expect(doc.css('li.inapplicable input[data-inapplicable]').count).to eq(1)
+ expect(doc.css('li.inapplicable > s').count).to eq(1)
+ end
+ end
+
+ shared_examples 'an invalid inapplicable task list item' do |html|
+ it "does nothing for `#{html}`" do
+ doc = filter("<ul><li>#{html}</li></ul>")
+
+ expect(doc.css('li.inapplicable input[data-inapplicable]').count).to eq(0)
+ end
+ end
+
+ it_behaves_like 'a valid inapplicable task list item', '[~] foobar'
+ it_behaves_like 'a valid inapplicable task list item', '[~] foo <em>bar</em>'
+ it_behaves_like 'an invalid inapplicable task list item', '[ ] foobar'
+ it_behaves_like 'an invalid inapplicable task list item', '[x] foobar'
+ it_behaves_like 'an invalid inapplicable task list item', 'foo [~] bar'
+
+ it 'does not wrap a sublist with <s>' do
+ html = '[~] foo <em>bar</em>\n<ol><li>sublist</li></ol>'
+ doc = filter("<ul><li>#{html}</li></ul>")
+
+ expect(doc.to_html).to include('<s>foo <em>bar</em>\n</s>')
+ expect(doc.css('li.inapplicable input[data-inapplicable]').count).to eq(1)
+ expect(doc.css('li.inapplicable > s').count).to eq(1)
+ end
+ end
end
diff --git a/spec/lib/banzai/pipeline/incident_management/timeline_event_pipeline_spec.rb b/spec/lib/banzai/pipeline/incident_management/timeline_event_pipeline_spec.rb
index 09d2919c6c4..4bccae04fda 100644
--- a/spec/lib/banzai/pipeline/incident_management/timeline_event_pipeline_spec.rb
+++ b/spec/lib/banzai/pipeline/incident_management/timeline_event_pipeline_spec.rb
@@ -10,9 +10,9 @@ RSpec.describe Banzai::Pipeline::IncidentManagement::TimelineEventPipeline do
expect(described_class.filters).to eq(
[
*Banzai::Pipeline::PlainMarkdownPipeline.filters,
+ Banzai::Filter::SanitizationFilter,
*Banzai::Pipeline::GfmPipeline.reference_filters,
Banzai::Filter::EmojiFilter,
- Banzai::Filter::SanitizationFilter,
Banzai::Filter::ExternalLinkFilter,
Banzai::Filter::ImageLinkFilter
]
@@ -62,7 +62,32 @@ RSpec.describe Banzai::Pipeline::IncidentManagement::TimelineEventPipeline do
context 'when markdown contains emojis' do
let(:markdown) { ':+1:👍' }
- it { is_expected.to eq('<p>👍👍</p>') }
+ it 'renders emojis wrapped in <gl-emoji> tag' do
+ # rubocop:disable Layout/LineLength
+ is_expected.to eq(
+ %q(<p><gl-emoji title="thumbs up sign" data-name="thumbsup" data-unicode-version="6.0">👍</gl-emoji><gl-emoji title="thumbs up sign" data-name="thumbsup" data-unicode-version="6.0">👍</gl-emoji></p>)
+ )
+ # rubocop:enable Layout/LineLength
+ end
+ end
+
+ context 'when markdown contains labels' do
+ let(:label) { create(:label, project: project, title: 'backend') }
+ let(:markdown) { %Q(~"#{label.name}" ~unknown) }
+
+ it 'replaces existing label to a link' do
+ # rubocop:disable Layout/LineLength
+ is_expected.to match(
+ %r(<p>.+<a href=\"[\w/]+-/issues\?label_name=#{label.name}\".+style=\"background-color: #\d{6}\".*>#{label.name}</span></a></span> ~unknown</p>)
+ )
+ # rubocop:enable Layout/LineLength
+ end
+ end
+
+ context 'when markdown contains table' do
+ let(:markdown) { '<table><tr><th>table head</th><tr><tr><td>table content</td></tr></table>' }
+
+ it { is_expected.to eq('table headtable content') }
end
context 'when markdown contains a reference to an issue' do
diff --git a/spec/lib/banzai/pipeline/plain_markdown_pipeline_spec.rb b/spec/lib/banzai/pipeline/plain_markdown_pipeline_spec.rb
index 80392fe264f..536f2a67415 100644
--- a/spec/lib/banzai/pipeline/plain_markdown_pipeline_spec.rb
+++ b/spec/lib/banzai/pipeline/plain_markdown_pipeline_spec.rb
@@ -11,9 +11,9 @@ RSpec.describe Banzai::Pipeline::PlainMarkdownPipeline do
it 'converts all reference punctuation to literals' do
reference_chars = Banzai::Filter::MarkdownPreEscapeFilter::REFERENCE_CHARACTERS
- markdown = reference_chars.split('').map {|char| char.prepend("\\") }.join
+ markdown = reference_chars.split('').map { |char| char.prepend("\\") }.join
punctuation = Banzai::Filter::MarkdownPreEscapeFilter::REFERENCE_CHARACTERS.split('')
- punctuation = punctuation.delete_if {|char| char == '&' }
+ punctuation = punctuation.delete_if { |char| char == '&' }
punctuation << '&amp;'
result = described_class.call(markdown, project: project)
diff --git a/spec/lib/banzai/renderer_spec.rb b/spec/lib/banzai/renderer_spec.rb
index d487268da78..ae9cf4c5068 100644
--- a/spec/lib/banzai/renderer_spec.rb
+++ b/spec/lib/banzai/renderer_spec.rb
@@ -104,7 +104,7 @@ RSpec.describe Banzai::Renderer do
describe '#post_process' do
let(:context_options) { {} }
- let(:html) { 'Consequatur aperiam et nesciunt modi aut assumenda quo id. '}
+ let(:html) { 'Consequatur aperiam et nesciunt modi aut assumenda quo id. ' }
let(:post_processed_html) { double(html_safe: 'safe doc') }
let(:doc) { double(to_html: post_processed_html) }
diff --git a/spec/lib/bitbucket_server/connection_spec.rb b/spec/lib/bitbucket_server/connection_spec.rb
index 873eded58d7..ae73955e1d1 100644
--- a/spec/lib/bitbucket_server/connection_spec.rb
+++ b/spec/lib/bitbucket_server/connection_spec.rb
@@ -64,7 +64,7 @@ RSpec.describe BitbucketServer::Connection do
context 'branch API' do
let(:branch_path) { '/projects/foo/repos/bar/branches' }
let(:branch_url) { 'https://test:7990/rest/branch-utils/1.0/projects/foo/repos/bar/branches' }
- let(:path) { }
+ let(:path) {}
it 'returns JSON body' do
WebMock.stub_request(:delete, branch_url).with(headers: headers).to_return(body: payload.to_json, status: 200, headers: headers)
diff --git a/spec/lib/bulk_imports/clients/http_spec.rb b/spec/lib/bulk_imports/clients/http_spec.rb
index c9730e03311..75c5f363b1f 100644
--- a/spec/lib/bulk_imports/clients/http_spec.rb
+++ b/spec/lib/bulk_imports/clients/http_spec.rb
@@ -56,15 +56,17 @@ RSpec.describe BulkImports::Clients::HTTP do
[
'http://gitlab.example/api/v4/resource',
hash_including(
- follow_redirects: false,
query: {
page: described_class::DEFAULT_PAGE,
- per_page: described_class::DEFAULT_PER_PAGE
+ per_page: described_class::DEFAULT_PER_PAGE,
+ private_token: token
},
headers: {
- 'Content-Type' => 'application/json',
- 'Authorization' => "Bearer #{token}"
- }
+ 'Content-Type' => 'application/json'
+ },
+ follow_redirects: true,
+ resend_on_redirect: false,
+ limit: 2
)
]
end
@@ -106,12 +108,13 @@ RSpec.describe BulkImports::Clients::HTTP do
def stub_http_get(path, query, response)
uri = "http://gitlab.example/api/v4/#{path}"
params = {
- follow_redirects: false,
- headers: {
- "Authorization" => "Bearer token",
- "Content-Type" => "application/json"
- }
- }.merge(query: query)
+ headers: { "Content-Type" => "application/json" },
+ query: { private_token: token },
+ follow_redirects: true,
+ resend_on_redirect: false,
+ limit: 2
+ }
+ params[:query] = params[:query].merge(query)
allow(Gitlab::HTTP).to receive(:get).with(uri, params).and_return(response)
end
@@ -127,11 +130,17 @@ RSpec.describe BulkImports::Clients::HTTP do
'http://gitlab.example/api/v4/resource',
hash_including(
body: {},
- follow_redirects: false,
headers: {
- 'Content-Type' => 'application/json',
- 'Authorization' => "Bearer #{token}"
- }
+ 'Content-Type' => 'application/json'
+ },
+ query: {
+ page: described_class::DEFAULT_PAGE,
+ per_page: described_class::DEFAULT_PER_PAGE,
+ private_token: token
+ },
+ follow_redirects: true,
+ resend_on_redirect: false,
+ limit: 2
)
]
end
@@ -146,11 +155,17 @@ RSpec.describe BulkImports::Clients::HTTP do
[
'http://gitlab.example/api/v4/resource',
hash_including(
- follow_redirects: false,
headers: {
- 'Content-Type' => 'application/json',
- 'Authorization' => "Bearer #{token}"
- }
+ 'Content-Type' => 'application/json'
+ },
+ query: {
+ page: described_class::DEFAULT_PAGE,
+ per_page: described_class::DEFAULT_PER_PAGE,
+ private_token: token
+ },
+ follow_redirects: true,
+ resend_on_redirect: false,
+ limit: 2
)
]
end
@@ -164,9 +179,16 @@ RSpec.describe BulkImports::Clients::HTTP do
hash_including(
stream_body: true,
headers: {
- 'Content-Type' => 'application/json',
- 'Authorization' => "Bearer #{token}"
- }
+ 'Content-Type' => 'application/json'
+ },
+ query: {
+ page: described_class::DEFAULT_PAGE,
+ per_page: described_class::DEFAULT_PER_PAGE,
+ private_token: token
+ },
+ follow_redirects: true,
+ resend_on_redirect: false,
+ limit: 2
)
]
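Taken together, the updated expectations show the bulk import client now authenticates with a private_token query parameter rather than a bearer Authorization header, and follows redirects with a bounded hop count. A sketch of the option hash the specs assert on every request (values copied from the expectations above; the client internals themselves are not part of this diff):

  {
    headers: { 'Content-Type' => 'application/json' },
    query: {
      page: described_class::DEFAULT_PAGE,
      per_page: described_class::DEFAULT_PER_PAGE,
      private_token: token
    },
    follow_redirects: true,
    resend_on_redirect: false,
    limit: 2
  }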
diff --git a/spec/lib/bulk_imports/common/pipelines/lfs_objects_pipeline_spec.rb b/spec/lib/bulk_imports/common/pipelines/lfs_objects_pipeline_spec.rb
index b769aa4af5a..f0b461e518e 100644
--- a/spec/lib/bulk_imports/common/pipelines/lfs_objects_pipeline_spec.rb
+++ b/spec/lib/bulk_imports/common/pipelines/lfs_objects_pipeline_spec.rb
@@ -11,8 +11,8 @@ RSpec.describe BulkImports::Common::Pipelines::LfsObjectsPipeline do
let(:tracker) { create(:bulk_import_tracker, entity: entity) }
let(:context) { BulkImports::Pipeline::Context.new(tracker) }
let(:lfs_dir_path) { tmpdir }
- let(:lfs_json_file_path) { File.join(lfs_dir_path, 'lfs_objects.json')}
- let(:lfs_file_path) { File.join(lfs_dir_path, oid)}
+ let(:lfs_json_file_path) { File.join(lfs_dir_path, 'lfs_objects.json') }
+ let(:lfs_file_path) { File.join(lfs_dir_path, oid) }
subject(:pipeline) { described_class.new(context) }
diff --git a/spec/lib/bulk_imports/common/pipelines/uploads_pipeline_spec.rb b/spec/lib/bulk_imports/common/pipelines/uploads_pipeline_spec.rb
index 9d43bb3ebfb..f650e931dc7 100644
--- a/spec/lib/bulk_imports/common/pipelines/uploads_pipeline_spec.rb
+++ b/spec/lib/bulk_imports/common/pipelines/uploads_pipeline_spec.rb
@@ -8,7 +8,7 @@ RSpec.describe BulkImports::Common::Pipelines::UploadsPipeline do
let(:tmpdir) { Dir.mktmpdir }
let(:uploads_dir_path) { File.join(tmpdir, '72a497a02fe3ee09edae2ed06d390038') }
- let(:upload_file_path) { File.join(uploads_dir_path, 'upload.txt')}
+ let(:upload_file_path) { File.join(uploads_dir_path, 'upload.txt') }
let(:tracker) { create(:bulk_import_tracker, entity: entity) }
let(:context) { BulkImports::Pipeline::Context.new(tracker) }
diff --git a/spec/lib/bulk_imports/groups/transformers/group_attributes_transformer_spec.rb b/spec/lib/bulk_imports/groups/transformers/group_attributes_transformer_spec.rb
index d775cf6b026..896af865c56 100644
--- a/spec/lib/bulk_imports/groups/transformers/group_attributes_transformer_spec.rb
+++ b/spec/lib/bulk_imports/groups/transformers/group_attributes_transformer_spec.rb
@@ -13,7 +13,7 @@ RSpec.describe BulkImports::Groups::Transformers::GroupAttributesTransformer do
:bulk_import_entity,
bulk_import: bulk_import,
source_full_path: 'source/full/path',
- destination_name: 'destination-name-path',
+ destination_slug: 'destination-slug-path',
destination_namespace: parent.full_path
)
end
@@ -41,14 +41,14 @@ RSpec.describe BulkImports::Groups::Transformers::GroupAttributesTransformer do
'name' => 'Name',
'description' => 'Description',
'parent_id' => parent.id,
- 'path' => 'destination-name-path'
+ 'path' => 'destination-slug-path'
})
end
- it 'transforms path from destination_name' do
+ it 'transforms path from destination_slug' do
transformed_data = subject.transform(context, data)
- expect(transformed_data['path']).to eq(entity.destination_name)
+ expect(transformed_data['path']).to eq(entity.destination_slug)
end
it 'removes full path' do
diff --git a/spec/lib/bulk_imports/projects/pipelines/releases_pipeline_spec.rb b/spec/lib/bulk_imports/projects/pipelines/releases_pipeline_spec.rb
index 2633598b48d..a376cdd712c 100644
--- a/spec/lib/bulk_imports/projects/pipelines/releases_pipeline_spec.rb
+++ b/spec/lib/bulk_imports/projects/pipelines/releases_pipeline_spec.rb
@@ -77,7 +77,7 @@ RSpec.describe BulkImports::Projects::Pipelines::ReleasesPipeline do
}
end
- let(:attributes) {{ 'links' => [link] }}
+ let(:attributes) { { 'links' => [link] } }
it 'restores release links' do
pipeline.run
@@ -106,7 +106,7 @@ RSpec.describe BulkImports::Projects::Pipelines::ReleasesPipeline do
}
end
- let(:attributes) {{ 'milestone_releases' => [{ 'milestone' => milestone }] }}
+ let(:attributes) { { 'milestone_releases' => [{ 'milestone' => milestone }] } }
it 'restores release milestone' do
pipeline.run
@@ -133,7 +133,7 @@ RSpec.describe BulkImports::Projects::Pipelines::ReleasesPipeline do
end
context 'when release is historical' do
- let(:attributes) {{ 'released_at' => '2018-12-26T10:17:14.621Z' }}
+ let(:attributes) { { 'released_at' => '2018-12-26T10:17:14.621Z' } }
it 'does not create release evidence' do
expect(::Releases::CreateEvidenceWorker).not_to receive(:perform_async)
@@ -143,7 +143,7 @@ RSpec.describe BulkImports::Projects::Pipelines::ReleasesPipeline do
end
context 'when release is upcoming' do
- let(:attributes) {{ 'released_at' => Time.zone.now + 30.days }}
+ let(:attributes) { { 'released_at' => Time.zone.now + 30.days } }
it 'does not create release evidence' do
expect(::Releases::CreateEvidenceWorker).not_to receive(:perform_async)
diff --git a/spec/lib/bulk_imports/projects/pipelines/snippets_repository_pipeline_spec.rb b/spec/lib/bulk_imports/projects/pipelines/snippets_repository_pipeline_spec.rb
index 9897e74ec7b..4d12b49e2c0 100644
--- a/spec/lib/bulk_imports/projects/pipelines/snippets_repository_pipeline_spec.rb
+++ b/spec/lib/bulk_imports/projects/pipelines/snippets_repository_pipeline_spec.rb
@@ -7,7 +7,7 @@ RSpec.describe BulkImports::Projects::Pipelines::SnippetsRepositoryPipeline do
let(:project) { create(:project) }
let(:bulk_import) { create(:bulk_import, user: user) }
let(:bulk_import_configuration) { create(:bulk_import_configuration, bulk_import: bulk_import) }
- let!(:matched_snippet) { create(:snippet, project: project, created_at: "1981-12-13T23:59:59Z")}
+ let!(:matched_snippet) { create(:snippet, project: project, created_at: "1981-12-13T23:59:59Z") }
let(:entity) do
create(
:bulk_import_entity,
diff --git a/spec/lib/bulk_imports/projects/transformers/project_attributes_transformer_spec.rb b/spec/lib/bulk_imports/projects/transformers/project_attributes_transformer_spec.rb
index a1d77b9732d..c1c4d0bf0db 100644
--- a/spec/lib/bulk_imports/projects/transformers/project_attributes_transformer_spec.rb
+++ b/spec/lib/bulk_imports/projects/transformers/project_attributes_transformer_spec.rb
@@ -15,7 +15,7 @@ RSpec.describe BulkImports::Projects::Transformers::ProjectAttributesTransformer
source_type: :project_entity,
bulk_import: bulk_import,
source_full_path: 'source/full/path',
- destination_name: 'Destination Project Name',
+ destination_slug: 'Destination Project Name',
destination_namespace: destination_group.full_path
)
end
@@ -32,12 +32,12 @@ RSpec.describe BulkImports::Projects::Transformers::ProjectAttributesTransformer
subject(:transformed_data) { described_class.new.transform(context, data) }
- it 'transforms name to destination name' do
- expect(transformed_data[:name]).to eq(entity.destination_name)
+ it 'transforms name to destination slug' do
+ expect(transformed_data[:name]).to eq(entity.destination_slug)
end
it 'adds path as parameterized name' do
- expect(transformed_data[:path]).to eq(entity.destination_name.parameterize)
+ expect(transformed_data[:path]).to eq(entity.destination_slug.parameterize)
end
it 'transforms visibility level' do
@@ -65,7 +65,7 @@ RSpec.describe BulkImports::Projects::Transformers::ProjectAttributesTransformer
source_type: :project_entity,
bulk_import: bulk_import,
source_full_path: 'source/full/path',
- destination_name: 'Destination Project Name',
+ destination_slug: 'Destination Project Name',
destination_namespace: ''
)
diff --git a/spec/lib/container_registry/gitlab_api_client_spec.rb b/spec/lib/container_registry/gitlab_api_client_spec.rb
index f2c627734a3..7836d8706f6 100644
--- a/spec/lib/container_registry/gitlab_api_client_spec.rb
+++ b/spec/lib/container_registry/gitlab_api_client_spec.rb
@@ -212,6 +212,105 @@ RSpec.describe ContainerRegistry::GitlabApiClient do
end
end
+ describe '#tags' do
+ let(:path) { 'namespace/path/to/repository' }
+ let(:page_size) { 100 }
+ let(:last) { nil }
+ let(:response) do
+ [
+ {
+ name: '0.1.0',
+ digest: 'sha256:1234567890',
+ media_type: 'application/vnd.oci.image.manifest.v1+json',
+ size_bytes: 1234567890,
+ created_at: 5.minutes.ago
+ },
+ {
+ name: 'latest',
+ digest: 'sha256:1234567892',
+ media_type: 'application/vnd.oci.image.manifest.v1+json',
+ size_bytes: 1234567892,
+ created_at: 10.minutes.ago
+ }
+ ]
+ end
+
+ subject { client.tags(path, page_size: page_size, last: last) }
+
+ context 'with valid parameters' do
+ let(:expected) do
+ {
+ pagination: {},
+ response_body: ::Gitlab::Json.parse(response.to_json)
+ }
+ end
+
+ before do
+ stub_tags(path, page_size: page_size, respond_with: response)
+ end
+
+ it { is_expected.to eq(expected) }
+ end
+
+ context 'with a response with a link header' do
+ let(:next_page_url) { 'http://sandbox.org/test?last=b' }
+ let(:expected) do
+ {
+ pagination: { next: { uri: URI(next_page_url) } },
+ response_body: ::Gitlab::Json.parse(response.to_json)
+ }
+ end
+
+ before do
+ stub_tags(path, page_size: page_size, next_page_url: next_page_url, respond_with: response)
+ end
+
+ it { is_expected.to eq(expected) }
+ end
+
+ context 'with a large page size set' do
+ let(:page_size) { described_class::MAX_TAGS_PAGE_SIZE + 1000 }
+
+ let(:expected) do
+ {
+ pagination: {},
+ response_body: ::Gitlab::Json.parse(response.to_json)
+ }
+ end
+
+ before do
+ stub_tags(path, page_size: described_class::MAX_TAGS_PAGE_SIZE, respond_with: response)
+ end
+
+ it { is_expected.to eq(expected) }
+ end
+
+ context 'with a last parameter set' do
+ let(:last) { 'test' }
+
+ let(:expected) do
+ {
+ pagination: {},
+ response_body: ::Gitlab::Json.parse(response.to_json)
+ }
+ end
+
+ before do
+ stub_tags(path, page_size: page_size, last: last, respond_with: response)
+ end
+
+ it { is_expected.to eq(expected) }
+ end
+
+ context 'with non successful response' do
+ before do
+ stub_tags(path, page_size: page_size, status_code: 404)
+ end
+
+ it { is_expected.to eq({}) }
+ end
+ end
+
describe '.supports_gitlab_api?' do
subject { described_class.supports_gitlab_api? }
@@ -389,4 +488,30 @@ RSpec.describe ContainerRegistry::GitlabApiClient do
.with(headers: headers)
.to_return(status: status_code, body: respond_with.to_json, headers: { 'Content-Type' => described_class::JSON_TYPE })
end
+
+ def stub_tags(path, page_size: nil, last: nil, next_page_url: nil, status_code: 200, respond_with: {})
+ params = { n: page_size, last: last }.compact
+
+ url = "#{registry_api_url}/gitlab/v1/repositories/#{path}/tags/list/"
+
+ if params.present?
+ url += "?#{params.map { |param, val| "#{param}=#{val}" }.join('&')}"
+ end
+
+ request_headers = { 'Accept' => described_class::JSON_TYPE }
+ request_headers['Authorization'] = "bearer #{token}" if token
+
+ response_headers = { 'Content-Type' => described_class::JSON_TYPE }
+ if next_page_url
+ response_headers['Link'] = "<#{next_page_url}>; rel=\"next\""
+ end
+
+ stub_request(:get, url)
+ .with(headers: request_headers)
+ .to_return(
+ status: status_code,
+ body: respond_with.to_json,
+ headers: response_headers
+ )
+ end
end
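The stub_tags helper above stubs GET <registry_api_url>/gitlab/v1/repositories/<path>/tags/list/ with optional n/last query parameters and, when next_page_url is given, a Link response header. A usage sketch matching the 'link header' context (values illustrative):

  stub_tags(
    'namespace/path/to/repository',
    page_size: 100,
    next_page_url: 'http://sandbox.org/test?last=b',
    respond_with: [{ name: 'latest', digest: 'sha256:1234567892' }]
  )

  client.tags('namespace/path/to/repository', page_size: 100, last: nil)
  # => { pagination: { next: { uri: URI('http://sandbox.org/test?last=b') } },
  #      response_body: [{ 'name' => 'latest', 'digest' => 'sha256:1234567892' }] }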
diff --git a/spec/lib/container_registry/tag_spec.rb b/spec/lib/container_registry/tag_spec.rb
index 9b931ab6dbc..190ddef0cd5 100644
--- a/spec/lib/container_registry/tag_spec.rb
+++ b/spec/lib/container_registry/tag_spec.rb
@@ -205,6 +205,41 @@ RSpec.describe ContainerRegistry::Tag do
it_behaves_like 'a processable'
end
+
+ describe '#force_created_at_from_iso8601' do
+ subject { tag.force_created_at_from_iso8601(input) }
+
+ shared_examples 'setting and caching the created_at value' do
+ it 'sets and caches the created_at value' do
+ expect(tag).not_to receive(:config)
+
+ subject
+
+ expect(tag.created_at).to eq(expected_value)
+ end
+ end
+
+ context 'with a valid input' do
+ let(:input) { 2.days.ago.iso8601 }
+ let(:expected_value) { DateTime.iso8601(input) }
+
+ it_behaves_like 'setting and caching the created_at value'
+ end
+
+ context 'with a nil input' do
+ let(:input) { nil }
+ let(:expected_value) { nil }
+
+ it_behaves_like 'setting and caching the created_at value'
+ end
+
+ context 'with an invalid input' do
+ let(:input) { 'not a timestamp' }
+ let(:expected_value) { nil }
+
+ it_behaves_like 'setting and caching the created_at value'
+ end
+ end
end
end
end
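force_created_at_from_iso8601 lets a caller seed created_at directly (for example from an API payload) without the tag touching #config; invalid or nil input simply caches nil. A sketch based only on the behaviour asserted above (the constructor arguments are assumed):

  tag = ContainerRegistry::Tag.new(repository, 'latest')
  tag.force_created_at_from_iso8601('2022-01-01T00:00:00Z')
  tag.created_at   # => DateTime.iso8601('2022-01-01T00:00:00Z'); #config is never read

  tag.force_created_at_from_iso8601('not a timestamp')
  tag.created_at   # => nil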
diff --git a/spec/lib/feature_spec.rb b/spec/lib/feature_spec.rb
index 4db3f04717b..56e0b4bca30 100644
--- a/spec/lib/feature_spec.rb
+++ b/spec/lib/feature_spec.rb
@@ -491,8 +491,8 @@ RSpec.describe Feature, stub_feature_flags: false do
end
shared_examples_for 'logging' do
- let(:expected_action) { }
- let(:expected_extra) { }
+ let(:expected_action) {}
+ let(:expected_extra) {}
it 'logs the event' do
expect(Feature.logger).to receive(:info).with(key: key, action: expected_action, **expected_extra)
diff --git a/spec/lib/gitlab/alert_management/payload/base_spec.rb b/spec/lib/gitlab/alert_management/payload/base_spec.rb
index d3c1a96253c..ad2a3c7b462 100644
--- a/spec/lib/gitlab/alert_management/payload/base_spec.rb
+++ b/spec/lib/gitlab/alert_management/payload/base_spec.rb
@@ -228,6 +228,46 @@ RSpec.describe Gitlab::AlertManagement::Payload::Base do
it { is_expected.to eq({ hosts: shortened_hosts, project_id: project.id }) }
end
end
+
+ context 'with present, non-string values for string fields' do
+ let_it_be(:stubs) do
+ {
+ description: { "description" => "description" },
+ monitoring_tool: ['datadog', 5],
+ service: 4356875,
+ title: true
+ }
+ end
+
+ before do
+ allow(parsed_payload).to receive_messages(stubs)
+ end
+
+ it 'casts values to strings' do
+ is_expected.to eq({
+ description: "{\"description\"=>\"description\"}",
+ monitoring_tool: "[\"datadog\", 5]",
+ service: '4356875',
+ project_id: project.id,
+ title: "true"
+ })
+ end
+ end
+
+ context 'with blank values for string fields' do
+ let_it_be(:stubs) do
+ {
+ description: nil,
+ monitoring_tool: '',
+ service: {},
+ title: []
+ }
+ end
+
+ it 'leaves the fields blank' do
+ is_expected.to eq({ project_id: project.id })
+ end
+ end
end
describe '#gitlab_fingerprint' do
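The expected strings in the casting context are each value's #to_s output (the implementation is not shown in this diff, but the assertions line up with plain Ruby), while blank values (nil, '', {}, []) are dropped from the parameters entirely. For reference:

  { "description" => "description" }.to_s  # => "{\"description\"=>\"description\"}"
  ['datadog', 5].to_s                      # => "[\"datadog\", 5]"
  4356875.to_s                             # => "4356875"
  true.to_s                                # => "true"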
diff --git a/spec/lib/gitlab/application_context_spec.rb b/spec/lib/gitlab/application_context_spec.rb
index f9e18a65af4..8b2a228b935 100644
--- a/spec/lib/gitlab/application_context_spec.rb
+++ b/spec/lib/gitlab/application_context_spec.rb
@@ -52,7 +52,7 @@ RSpec.describe Gitlab::ApplicationContext do
end
it 'raises an error when passing invalid options' do
- expect { described_class.push(no: 'option')}.to raise_error(ArgumentError)
+ expect { described_class.push(no: 'option') }.to raise_error(ArgumentError)
end
end
diff --git a/spec/lib/gitlab/application_rate_limiter_spec.rb b/spec/lib/gitlab/application_rate_limiter_spec.rb
index 177ce1134d8..41e79f811fa 100644
--- a/spec/lib/gitlab/application_rate_limiter_spec.rb
+++ b/spec/lib/gitlab/application_rate_limiter_spec.rb
@@ -111,23 +111,35 @@ RSpec.describe Gitlab::ApplicationRateLimiter, :clean_gitlab_redis_rate_limiting
shared_examples 'throttles based on key and scope' do
let(:start_time) { Time.current.beginning_of_hour }
- it 'returns true when threshold is exceeded' do
+ let(:threshold) { nil }
+ let(:interval) { nil }
+
+ it 'returns true when threshold is exceeded', :aggregate_failures do
travel_to(start_time) do
- expect(subject.throttled?(:test_action, scope: scope)).to eq(false)
+ expect(subject.throttled?(
+ :test_action, scope: scope, threshold: threshold, interval: interval)
+ ).to eq(false)
end
travel_to(start_time + 1.minute) do
- expect(subject.throttled?(:test_action, scope: scope)).to eq(true)
+ expect(subject.throttled?(
+ :test_action, scope: scope, threshold: threshold, interval: interval)
+ ).to eq(true)
# Assert that it does not affect other actions or scope
expect(subject.throttled?(:another_action, scope: scope)).to eq(false)
- expect(subject.throttled?(:test_action, scope: [user])).to eq(false)
+
+ expect(subject.throttled?(
+ :test_action, scope: [user], threshold: threshold, interval: interval)
+ ).to eq(false)
end
end
- it 'returns false when interval has elapsed' do
+ it 'returns false when interval has elapsed', :aggregate_failures do
travel_to(start_time) do
- expect(subject.throttled?(:test_action, scope: scope)).to eq(false)
+ expect(subject.throttled?(
+ :test_action, scope: scope, threshold: threshold, interval: interval)
+ ).to eq(false)
# another_action has a threshold of 2 so we simulate 2 requests
expect(subject.throttled?(:another_action, scope: scope)).to eq(false)
@@ -135,21 +147,34 @@ RSpec.describe Gitlab::ApplicationRateLimiter, :clean_gitlab_redis_rate_limiting
end
travel_to(start_time + 2.minutes) do
- expect(subject.throttled?(:test_action, scope: scope)).to eq(false)
+ expect(subject.throttled?(
+ :test_action, scope: scope, threshold: threshold, interval: interval)
+ ).to eq(false)
# Assert that another_action has its own interval that hasn't elapsed
expect(subject.throttled?(:another_action, scope: scope)).to eq(true)
end
end
- it 'allows peeking at the current state without changing its value' do
+ it 'allows peeking at the current state without changing its value', :aggregate_failures do
travel_to(start_time) do
- expect(subject.throttled?(:test_action, scope: scope)).to eq(false)
+ expect(subject.throttled?(
+ :test_action, scope: scope, threshold: threshold, interval: interval)
+ ).to eq(false)
+
2.times do
- expect(subject.throttled?(:test_action, scope: scope, peek: true)).to eq(false)
+ expect(subject.throttled?(
+ :test_action, scope: scope, threshold: threshold, interval: interval, peek: true)
+ ).to eq(false)
end
- expect(subject.throttled?(:test_action, scope: scope)).to eq(true)
- expect(subject.throttled?(:test_action, scope: scope, peek: true)).to eq(true)
+
+ expect(subject.throttled?(
+ :test_action, scope: scope, threshold: threshold, interval: interval)
+ ).to eq(true)
+
+ expect(subject.throttled?(
+ :test_action, scope: scope, peek: true, threshold: threshold, interval: interval)
+ ).to eq(true)
end
end
end
@@ -165,6 +190,28 @@ RSpec.describe Gitlab::ApplicationRateLimiter, :clean_gitlab_redis_rate_limiting
it_behaves_like 'throttles based on key and scope'
end
+
+ context 'when threshold and interval get overwritten from rate_limits' do
+ let(:rate_limits) do
+ {
+ test_action: {
+ threshold: 0,
+ interval: 0
+ },
+ another_action: {
+ threshold: -> { 2 },
+ interval: -> { 3.minutes }
+ }
+ }
+ end
+
+ let(:scope) { [user, project] }
+
+ it_behaves_like 'throttles based on key and scope' do
+ let(:threshold) { 1 }
+ let(:interval) { 2.minutes }
+ end
+ end
end
describe '.peek' do
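The shared examples now thread threshold: and interval: through every throttled? call so each including context can override them, and the new context checks that rate_limits entries may be static values or lambdas. A sketch of the call shape being exercised (values illustrative):

  Gitlab::ApplicationRateLimiter.throttled?(
    :test_action,
    scope: [user, project],
    threshold: 1,         # explicit value used in place of the configured rate_limits entry
    interval: 2.minutes,  # likewise for the interval
    peek: true            # peeking inspects the counter without incrementing it
  )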
diff --git a/spec/lib/gitlab/asciidoc_spec.rb b/spec/lib/gitlab/asciidoc_spec.rb
index bfea1315d90..b2bce2076b0 100644
--- a/spec/lib/gitlab/asciidoc_spec.rb
+++ b/spec/lib/gitlab/asciidoc_spec.rb
@@ -791,7 +791,7 @@ module Gitlab
end
context 'when the file does not exist' do
- it { is_expected.to include("[ERROR: include::#{include_path}[] - unresolved directive]")}
+ it { is_expected.to include("[ERROR: include::#{include_path}[] - unresolved directive]") }
end
end
diff --git a/spec/lib/gitlab/audit/auditor_spec.rb b/spec/lib/gitlab/audit/auditor_spec.rb
new file mode 100644
index 00000000000..fc5917ca583
--- /dev/null
+++ b/spec/lib/gitlab/audit/auditor_spec.rb
@@ -0,0 +1,258 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Audit::Auditor do
+ let(:name) { 'audit_operation' }
+ let(:author) { create(:user) }
+ let(:group) { create(:group) }
+ let(:provider) { 'standard' }
+ let(:context) do
+ { name: name,
+ author: author,
+ scope: group,
+ target: group,
+ authentication_event: true,
+ authentication_provider: provider,
+ message: "Signed in using standard authentication" }
+ end
+
+ let(:logger) { instance_spy(Gitlab::AuditJsonLogger) }
+
+ subject(:auditor) { described_class }
+
+ describe '.audit' do
+ context 'when authentication event' do
+ let(:audit!) { auditor.audit(context) }
+
+ it 'creates an authentication event' do
+ expect(AuthenticationEvent).to receive(:new).with(
+ {
+ user: author,
+ user_name: author.name,
+ ip_address: author.current_sign_in_ip,
+ result: AuthenticationEvent.results[:success],
+ provider: provider
+ }
+ ).and_call_original
+
+ audit!
+ end
+
+ it 'logs audit events to database', :aggregate_failures do
+ freeze_time do
+ audit!
+
+ audit_event = AuditEvent.last
+
+ expect(audit_event.author_id).to eq(author.id)
+ expect(audit_event.entity_id).to eq(group.id)
+ expect(audit_event.entity_type).to eq(group.class.name)
+ expect(audit_event.created_at).to eq(Time.zone.now)
+ expect(audit_event.details[:target_id]).to eq(group.id)
+ expect(audit_event.details[:target_type]).to eq(group.class.name)
+ end
+ end
+
+ it 'logs audit events to file' do
+ expect(::Gitlab::AuditJsonLogger).to receive(:build).and_return(logger)
+
+ audit!
+
+ expect(logger).to have_received(:info).with(
+ hash_including(
+ 'author_id' => author.id,
+ 'author_name' => author.name,
+ 'entity_id' => group.id,
+ 'entity_type' => group.class.name,
+ 'details' => kind_of(Hash)
+ )
+ )
+ end
+
+ context 'when overriding the create datetime' do
+ let(:context) do
+ { name: name,
+ author: author,
+ scope: group,
+ target: group,
+ created_at: 3.weeks.ago,
+ authentication_event: true,
+ authentication_provider: provider,
+ message: "Signed in using standard authentication" }
+ end
+
+ it 'logs audit events to database', :aggregate_failures do
+ freeze_time do
+ audit!
+
+ audit_event = AuditEvent.last
+
+ expect(audit_event.author_id).to eq(author.id)
+ expect(audit_event.entity_id).to eq(group.id)
+ expect(audit_event.entity_type).to eq(group.class.name)
+ expect(audit_event.created_at).to eq(3.weeks.ago)
+ expect(audit_event.details[:target_id]).to eq(group.id)
+ expect(audit_event.details[:target_type]).to eq(group.class.name)
+ end
+ end
+
+ it 'logs audit events to file' do
+ freeze_time do
+ expect(::Gitlab::AuditJsonLogger).to receive(:build).and_return(logger)
+
+ audit!
+
+ expect(logger).to have_received(:info).with(
+ hash_including(
+ 'author_id' => author.id,
+ 'author_name' => author.name,
+ 'entity_id' => group.id,
+ 'entity_type' => group.class.name,
+ 'details' => kind_of(Hash),
+ 'created_at' => 3.weeks.ago.iso8601(3)
+ )
+ )
+ end
+ end
+ end
+
+ context 'when overriding the additional_details' do
+ additional_details = { action: :custom, from: false, to: true }
+ let(:context) do
+ { name: name,
+ author: author,
+ scope: group,
+ target: group,
+ created_at: Time.zone.now,
+ additional_details: additional_details,
+ authentication_event: true,
+ authentication_provider: provider,
+ message: "Signed in using standard authentication" }
+ end
+
+ it 'logs audit events to database' do
+ freeze_time do
+ audit!
+
+ expect(AuditEvent.last.details).to include(additional_details)
+ end
+ end
+
+ it 'logs audit events to file' do
+ freeze_time do
+ expect(::Gitlab::AuditJsonLogger).to receive(:build).and_return(logger)
+
+ audit!
+
+ expect(logger).to have_received(:info).with(
+ hash_including(
+ 'details' => hash_including('action' => 'custom', 'from' => 'false', 'to' => 'true'),
+ 'action' => 'custom',
+ 'from' => 'false',
+ 'to' => 'true'
+ )
+ )
+ end
+ end
+ end
+
+ context 'when overriding the target_details' do
+ target_details = "this is my target details"
+ let(:context) do
+ {
+ name: name,
+ author: author,
+ scope: group,
+ target: group,
+ created_at: Time.zone.now,
+ target_details: target_details,
+ authentication_event: true,
+ authentication_provider: provider,
+ message: "Signed in using standard authentication"
+ }
+ end
+
+ it 'logs audit events to database' do
+ freeze_time do
+ audit!
+
+ audit_event = AuditEvent.last
+ expect(audit_event.details).to include({ target_details: target_details })
+ expect(audit_event.target_details).to eq(target_details)
+ end
+ end
+
+ it 'logs audit events to file' do
+ freeze_time do
+ expect(::Gitlab::AuditJsonLogger).to receive(:build).and_return(logger)
+
+ audit!
+
+ expect(logger).to have_received(:info).with(
+ hash_including(
+ 'details' => hash_including('target_details' => target_details),
+ 'target_details' => target_details
+ )
+ )
+ end
+ end
+ end
+ end
+
+ context 'when authentication event is false' do
+ let(:context) do
+ { name: name, author: author, scope: group,
+ target: group, authentication_event: false, message: "sample message" }
+ end
+
+ it 'does not create an authentication event' do
+ expect { auditor.audit(context) }.not_to change(AuthenticationEvent, :count)
+ end
+ end
+
+ context 'when authentication event is invalid' do
+ let(:audit!) { auditor.audit(context) }
+
+ before do
+ allow(AuthenticationEvent).to receive(:new).and_raise(ActiveRecord::RecordInvalid)
+ allow(Gitlab::ErrorTracking).to receive(:track_exception)
+ end
+
+ it 'tracks error' do
+ audit!
+
+ expect(Gitlab::ErrorTracking).to have_received(:track_exception).with(
+ kind_of(ActiveRecord::RecordInvalid),
+ { audit_operation: name }
+ )
+ end
+
+ it 'does not throw exception' do
+ expect { auditor.audit(context) }.not_to raise_exception
+ end
+ end
+
+ context 'when audit events are invalid' do
+ let(:audit!) { auditor.audit(context) }
+
+ before do
+ allow(AuditEvent).to receive(:bulk_insert!).and_raise(ActiveRecord::RecordInvalid)
+ allow(Gitlab::ErrorTracking).to receive(:track_exception)
+ end
+
+ it 'tracks error' do
+ audit!
+
+ expect(Gitlab::ErrorTracking).to have_received(:track_exception).with(
+ kind_of(ActiveRecord::RecordInvalid),
+ { audit_operation: name }
+ )
+ end
+
+ it 'does not throw exception' do
+ expect { auditor.audit(context) }.not_to raise_exception
+ end
+ end
+ end
+end
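The context hash accepted by .audit, as exercised by this spec, carries name, author, scope, target and message, plus optional created_at, additional_details, target_details and the authentication flags. A sketch of a caller restricted to those keys (current_user is assumed):

  ::Gitlab::Audit::Auditor.audit(
    name: 'audit_operation',
    author: current_user,
    scope: group,
    target: group,
    message: 'Signed in using standard authentication',
    authentication_event: true,
    authentication_provider: 'standard',
    additional_details: { action: :custom, from: false, to: true }
  )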
diff --git a/spec/lib/gitlab/audit/ci_runner_token_author_spec.rb b/spec/lib/gitlab/audit/ci_runner_token_author_spec.rb
index f55e1b44936..89664c57084 100644
--- a/spec/lib/gitlab/audit/ci_runner_token_author_spec.rb
+++ b/spec/lib/gitlab/audit/ci_runner_token_author_spec.rb
@@ -6,7 +6,7 @@ RSpec.describe Gitlab::Audit::CiRunnerTokenAuthor do
describe '.initialize' do
subject { described_class.new(audit_event) }
- let(:details) { }
+ let(:details) {}
let(:audit_event) { instance_double(AuditEvent, details: details, entity_type: 'Project', entity_path: 'd/e') }
context 'with runner_authentication_token' do
diff --git a/spec/lib/gitlab/audit/deploy_key_author_spec.rb b/spec/lib/gitlab/audit/deploy_key_author_spec.rb
new file mode 100644
index 00000000000..72444f77c91
--- /dev/null
+++ b/spec/lib/gitlab/audit/deploy_key_author_spec.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Audit::DeployKeyAuthor do
+ describe '#initialize' do
+ it 'sets correct attributes' do
+ expect(described_class.new(name: 'Lorem deploy key'))
+ .to have_attributes(id: -3, name: 'Lorem deploy key')
+ end
+
+ it 'sets default name when it is not provided' do
+ expect(described_class.new)
+ .to have_attributes(id: -3, name: 'Deploy Key')
+ end
+ end
+end
diff --git a/spec/lib/gitlab/audit/null_author_spec.rb b/spec/lib/gitlab/audit/null_author_spec.rb
index 2045139a5f7..e2f71a34534 100644
--- a/spec/lib/gitlab/audit/null_author_spec.rb
+++ b/spec/lib/gitlab/audit/null_author_spec.rb
@@ -57,6 +57,15 @@ RSpec.describe Gitlab::Audit::NullAuthor do
expect(subject.for(-2, audit_event)).to be_a(Gitlab::Audit::DeployTokenAuthor)
expect(subject.for(-2, audit_event)).to have_attributes(id: -2, name: 'Test deploy token')
end
+
+ it 'returns DeployKeyAuthor when id equals -3', :aggregate_failures do
+ allow(audit_event).to receive(:[]).with(:author_name).and_return('Test deploy key')
+ allow(audit_event).to receive(:details).and_return({})
+ allow(audit_event).to receive(:target_type)
+
+ expect(subject.for(-3, audit_event)).to be_a(Gitlab::Audit::DeployKeyAuthor)
+ expect(subject.for(-3, audit_event)).to have_attributes(id: -3, name: 'Test deploy key')
+ end
end
describe '#current_sign_in_ip' do
diff --git a/spec/lib/gitlab/audit/null_target_spec.rb b/spec/lib/gitlab/audit/null_target_spec.rb
new file mode 100644
index 00000000000..f192e0cd8db
--- /dev/null
+++ b/spec/lib/gitlab/audit/null_target_spec.rb
@@ -0,0 +1,25 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Audit::NullTarget do
+ subject { described_class.new }
+
+ describe '#id' do
+ it 'returns nil' do
+ expect(subject.id).to eq(nil)
+ end
+ end
+
+ describe '#type' do
+ it 'returns nil' do
+ expect(subject.type).to eq(nil)
+ end
+ end
+
+ describe '#details' do
+ it 'returns nil' do
+ expect(subject.details).to eq(nil)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/audit/target_spec.rb b/spec/lib/gitlab/audit/target_spec.rb
new file mode 100644
index 00000000000..5c06cd117a9
--- /dev/null
+++ b/spec/lib/gitlab/audit/target_spec.rb
@@ -0,0 +1,47 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Audit::Target do
+ let(:object) { double('object') } # rubocop:disable RSpec/VerifiedDoubles
+
+ subject { described_class.new(object) }
+
+ describe '#id' do
+ it 'returns object id' do
+ allow(object).to receive(:id).and_return(object_id)
+
+ expect(subject.id).to eq(object_id)
+ end
+ end
+
+ describe '#type' do
+ it 'returns object class name' do
+ allow(object).to receive_message_chain(:class, :name).and_return('User')
+
+ expect(subject.type).to eq('User')
+ end
+ end
+
+ describe '#details' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:name, :audit_details, :details) do
+ 'jackie' | 'wanderer' | 'jackie'
+ 'jackie' | nil | 'jackie'
+ nil | 'wanderer' | 'wanderer'
+ nil | nil | 'unknown'
+ end
+
+ before do
+ allow(object).to receive(:name).and_return(name) if name
+ allow(object).to receive(:audit_details).and_return(audit_details) if audit_details
+ end
+
+ with_them do
+ it 'returns details' do
+ expect(subject.details).to eq(details)
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/auth/auth_finders_spec.rb b/spec/lib/gitlab/auth/auth_finders_spec.rb
index e985f66bfe9..d0b44135a2f 100644
--- a/spec/lib/gitlab/auth/auth_finders_spec.rb
+++ b/spec/lib/gitlab/auth/auth_finders_spec.rb
@@ -127,7 +127,7 @@ RSpec.describe Gitlab::Auth::AuthFinders do
let(:doorkeeper_access_token) { Doorkeeper::AccessToken.create!(application_id: application.id, resource_owner_id: user.id, scopes: 'api') }
before do
- set_bearer_token(doorkeeper_access_token.token)
+ set_bearer_token(doorkeeper_access_token.plaintext_token)
end
it { is_expected.to eq user }
@@ -577,7 +577,7 @@ RSpec.describe Gitlab::Auth::AuthFinders do
context 'passed as header' do
before do
- set_bearer_token(doorkeeper_access_token.token)
+ set_bearer_token(doorkeeper_access_token.plaintext_token)
end
it 'returns token if valid oauth_access_token' do
@@ -587,7 +587,7 @@ RSpec.describe Gitlab::Auth::AuthFinders do
context 'passed as param' do
it 'returns user if valid oauth_access_token' do
- set_param(:access_token, doorkeeper_access_token.token)
+ set_param(:access_token, doorkeeper_access_token.plaintext_token)
expect(find_oauth_access_token.token).to eq doorkeeper_access_token.token
end
diff --git a/spec/lib/gitlab/auth/ip_rate_limiter_spec.rb b/spec/lib/gitlab/auth/ip_rate_limiter_spec.rb
index f23fdd3fbcb..3d9be4c3489 100644
--- a/spec/lib/gitlab/auth/ip_rate_limiter_spec.rb
+++ b/spec/lib/gitlab/auth/ip_rate_limiter_spec.rb
@@ -15,7 +15,7 @@ RSpec.describe Gitlab::Auth::IpRateLimiter, :use_clean_rails_memory_store_cachin
}
end
- subject { described_class.new(ip) }
+ subject(:rate_limiter) { described_class.new(ip) }
before do
stub_rack_attack_setting(options)
@@ -25,7 +25,7 @@ RSpec.describe Gitlab::Auth::IpRateLimiter, :use_clean_rails_memory_store_cachin
end
after do
- subject.reset!
+ rate_limiter.reset!
end
describe '#register_fail!' do
@@ -86,7 +86,7 @@ RSpec.describe Gitlab::Auth::IpRateLimiter, :use_clean_rails_memory_store_cachin
end
end
- context 'when IP is whitlisted' do
+ context 'when IP is allow listed' do
let(:ip) { '127.0.0.1' }
it_behaves_like 'skips the rate limiter'
@@ -97,4 +97,20 @@ RSpec.describe Gitlab::Auth::IpRateLimiter, :use_clean_rails_memory_store_cachin
it_behaves_like 'skips the rate limiter'
end
+
+ describe '#trusted_ip?' do
+ subject { rate_limiter.trusted_ip? }
+
+ context 'when ip is in the trusted list' do
+ let(:ip) { '127.0.0.1' }
+
+ it { is_expected.to be_truthy }
+ end
+
+ context 'when ip is not in the trusted list' do
+ let(:ip) { '10.0.0.1' }
+
+ it { is_expected.to be_falsey }
+ end
+ end
end
diff --git a/spec/lib/gitlab/auth/o_auth/auth_hash_spec.rb b/spec/lib/gitlab/auth/o_auth/auth_hash_spec.rb
index 69068883096..a044094179c 100644
--- a/spec/lib/gitlab/auth/o_auth/auth_hash_spec.rb
+++ b/spec/lib/gitlab/auth/o_auth/auth_hash_spec.rb
@@ -14,6 +14,7 @@ RSpec.describe Gitlab::Auth::OAuth::AuthHash do
)
end
+ let(:provider_config) { { 'args' => { 'gitlab_username_claim' => 'first_name' } } }
let(:uid_raw) do
+"CN=Onur K\xC3\xBC\xC3\xA7\xC3\xBCk,OU=Test,DC=example,DC=net"
end
@@ -35,6 +36,7 @@ RSpec.describe Gitlab::Auth::OAuth::AuthHash do
let(:email_utf8) { email_ascii.force_encoding(Encoding::UTF_8) }
let(:nickname_utf8) { nickname_ascii.force_encoding(Encoding::UTF_8) }
let(:name_utf8) { name_ascii.force_encoding(Encoding::UTF_8) }
+ let(:first_name_utf8) { first_name_ascii.force_encoding(Encoding::UTF_8) }
let(:info_hash) do
{
@@ -91,6 +93,34 @@ RSpec.describe Gitlab::Auth::OAuth::AuthHash do
end
end
+ context 'custom username field provided' do
+ before do
+ allow(Gitlab::Auth::OAuth::Provider).to receive(:config_for).and_return(provider_config)
+ end
+
+ it 'uses the custom field for the username' do
+ expect(auth_hash.username).to eql first_name_utf8
+ end
+
+ it 'uses the default claim for the username when the custom claim is not found' do
+ provider_config['args']['gitlab_username_claim'] = 'nonexistent'
+
+ expect(auth_hash.username).to eql nickname_utf8
+ end
+
+ it 'uses the default claim for the username when the custom claim is empty' do
+ info_hash[:first_name] = ''
+
+ expect(auth_hash.username).to eql nickname_utf8
+ end
+
+ it 'uses the default claim for the username when the custom claim is nil' do
+ info_hash[:first_name] = nil
+
+ expect(auth_hash.username).to eql nickname_utf8
+ end
+ end
+
context 'auth_hash constructed with ASCII-8BIT encoding' do
it 'forces utf8 encoding on uid' do
expect(auth_hash.uid.encoding).to eql Encoding::UTF_8
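The new contexts stub Gitlab::Auth::OAuth::Provider.config_for so that the provider's args include a gitlab_username_claim, redirecting auth_hash.username to that info field and falling back to the nickname claim when the custom claim is missing or blank. The stubbed configuration shape, for reference (the provider name itself is illustrative):

  provider_config = {
    'name' => 'openid_connect',
    'args' => { 'gitlab_username_claim' => 'first_name' }
  }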
diff --git a/spec/lib/gitlab/auth/o_auth/user_spec.rb b/spec/lib/gitlab/auth/o_auth/user_spec.rb
index 5f5e7f211f8..b160f322fb8 100644
--- a/spec/lib/gitlab/auth/o_auth/user_spec.rb
+++ b/spec/lib/gitlab/auth/o_auth/user_spec.rb
@@ -727,6 +727,7 @@ RSpec.describe Gitlab::Auth::OAuth::User do
context 'signup with linked omniauth and LDAP account' do
before do
stub_omniauth_config(auto_link_ldap_user: true)
+ stub_ldap_setting(enabled: true)
allow(ldap_user).to receive(:uid) { uid }
allow(ldap_user).to receive(:username) { uid }
allow(ldap_user).to receive(:email) { ['johndoe@example.com', 'john2@example.com'] }
diff --git a/spec/lib/gitlab/auth_spec.rb b/spec/lib/gitlab/auth_spec.rb
index 1e869df0988..c2d64aa2fb3 100644
--- a/spec/lib/gitlab/auth_spec.rb
+++ b/spec/lib/gitlab/auth_spec.rb
@@ -87,7 +87,7 @@ RSpec.describe Gitlab::Auth, :use_clean_rails_memory_store_caching do
end
context 'when IP is already banned' do
- subject { gl_auth.find_for_git_client('username', 'password', project: nil, ip: 'ip') }
+ subject { gl_auth.find_for_git_client('username-does-not-matter', 'password-does-not-matter', project: nil, ip: 'ip') }
before do
expect_next_instance_of(Gitlab::Auth::IpRateLimiter) do |rate_limiter|
@@ -219,16 +219,16 @@ RSpec.describe Gitlab::Auth, :use_clean_rails_memory_store_caching do
end
it 'recognizes master passwords' do
- user = create(:user, password: 'password')
+ user = create(:user)
- expect(gl_auth.find_for_git_client(user.username, 'password', project: nil, ip: 'ip')).to have_attributes(actor: user, project: nil, type: :gitlab_or_ldap, authentication_abilities: described_class.full_authentication_abilities)
+ expect(gl_auth.find_for_git_client(user.username, user.password, project: nil, ip: 'ip')).to have_attributes(actor: user, project: nil, type: :gitlab_or_ldap, authentication_abilities: described_class.full_authentication_abilities)
end
include_examples 'user login operation with unique ip limit' do
- let(:user) { create(:user, password: 'password') }
+ let(:user) { create(:user) }
def operation
- expect(gl_auth.find_for_git_client(user.username, 'password', project: nil, ip: 'ip')).to have_attributes(actor: user, project: nil, type: :gitlab_or_ldap, authentication_abilities: described_class.full_authentication_abilities)
+ expect(gl_auth.find_for_git_client(user.username, user.password, project: nil, ip: 'ip')).to have_attributes(actor: user, project: nil, type: :gitlab_or_ldap, authentication_abilities: described_class.full_authentication_abilities)
end
end
@@ -502,8 +502,7 @@ RSpec.describe Gitlab::Auth, :use_clean_rails_memory_store_caching do
user = create(
:user,
:blocked,
- username: 'normal_user',
- password: 'my-secret'
+ username: 'normal_user'
)
expect(gl_auth.find_for_git_client(user.username, user.password, project: nil, ip: 'ip'))
@@ -512,7 +511,7 @@ RSpec.describe Gitlab::Auth, :use_clean_rails_memory_store_caching do
context 'when 2fa is enabled globally' do
let_it_be(:user) do
- create(:user, username: 'normal_user', password: 'my-secret', otp_grace_period_started_at: 1.day.ago)
+ create(:user, username: 'normal_user', otp_grace_period_started_at: 1.day.ago)
end
before do
@@ -536,7 +535,7 @@ RSpec.describe Gitlab::Auth, :use_clean_rails_memory_store_caching do
context 'when 2fa is enabled personally' do
let(:user) do
- create(:user, :two_factor, username: 'normal_user', password: 'my-secret', otp_grace_period_started_at: 1.day.ago)
+ create(:user, :two_factor, username: 'normal_user', otp_grace_period_started_at: 1.day.ago)
end
it 'fails' do
@@ -548,8 +547,7 @@ RSpec.describe Gitlab::Auth, :use_clean_rails_memory_store_caching do
it 'goes through lfs authentication' do
user = create(
:user,
- username: 'normal_user',
- password: 'my-secret'
+ username: 'normal_user'
)
expect(gl_auth.find_for_git_client(user.username, user.password, project: nil, ip: 'ip'))
@@ -559,8 +557,7 @@ RSpec.describe Gitlab::Auth, :use_clean_rails_memory_store_caching do
it 'goes through oauth authentication when the username is oauth2' do
user = create(
:user,
- username: 'oauth2',
- password: 'my-secret'
+ username: 'oauth2'
)
expect(gl_auth.find_for_git_client(user.username, user.password, project: nil, ip: 'ip'))
@@ -635,7 +632,7 @@ RSpec.describe Gitlab::Auth, :use_clean_rails_memory_store_caching do
context 'when deploy token and user have the same username' do
let(:username) { 'normal_user' }
- let(:user) { create(:user, username: username, password: 'my-secret') }
+ let(:user) { create(:user, username: username) }
let(:deploy_token) { create(:deploy_token, username: username, read_registry: false, projects: [project]) }
it 'succeeds for the token' do
@@ -648,7 +645,7 @@ RSpec.describe Gitlab::Auth, :use_clean_rails_memory_store_caching do
it 'succeeds for the user' do
auth_success = { actor: user, project: nil, type: :gitlab_or_ldap, authentication_abilities: described_class.full_authentication_abilities }
- expect(gl_auth.find_for_git_client(username, 'my-secret', project: project, ip: 'ip'))
+ expect(gl_auth.find_for_git_client(username, user.password, project: project, ip: 'ip'))
.to have_attributes(auth_success)
end
end
@@ -834,72 +831,64 @@ RSpec.describe Gitlab::Auth, :use_clean_rails_memory_store_caching do
end
describe 'find_with_user_password' do
- let!(:user) do
- create(:user,
- username: username,
- password: password,
- password_confirmation: password)
- end
-
+ let!(:user) { create(:user, username: username) }
let(:username) { 'John' } # username isn't lowercase, test this
- let(:password) { 'my-secret' }
it "finds user by valid login/password" do
- expect(gl_auth.find_with_user_password(username, password)).to eql user
+ expect(gl_auth.find_with_user_password(username, user.password)).to eql user
end
it 'finds user by valid email/password with case-insensitive email' do
- expect(gl_auth.find_with_user_password(user.email.upcase, password)).to eql user
+ expect(gl_auth.find_with_user_password(user.email.upcase, user.password)).to eql user
end
it 'finds user by valid username/password with case-insensitive username' do
- expect(gl_auth.find_with_user_password(username.upcase, password)).to eql user
+ expect(gl_auth.find_with_user_password(username.upcase, user.password)).to eql user
end
it "does not find user with invalid password" do
- password = 'wrong'
- expect(gl_auth.find_with_user_password(username, password)).not_to eql user
+ expect(gl_auth.find_with_user_password(username, 'incorrect_password')).not_to eql user
end
it "does not find user with invalid login" do
- user = 'wrong'
- expect(gl_auth.find_with_user_password(username, password)).not_to eql user
+ username = 'wrong'
+ expect(gl_auth.find_with_user_password(username, user.password)).not_to eql user
end
include_examples 'user login operation with unique ip limit' do
def operation
- expect(gl_auth.find_with_user_password(username, password)).to eq(user)
+ expect(gl_auth.find_with_user_password(username, user.password)).to eq(user)
end
end
it 'finds the user in deactivated state' do
user.deactivate!
- expect(gl_auth.find_with_user_password(username, password)).to eql user
+ expect(gl_auth.find_with_user_password(username, user.password)).to eql user
end
it "does not find user in blocked state" do
user.block
- expect(gl_auth.find_with_user_password(username, password)).not_to eql user
+ expect(gl_auth.find_with_user_password(username, user.password)).not_to eql user
end
it 'does not find user in locked state' do
user.lock_access!
- expect(gl_auth.find_with_user_password(username, password)).not_to eql user
+ expect(gl_auth.find_with_user_password(username, user.password)).not_to eql user
end
it "does not find user in ldap_blocked state" do
user.ldap_block
- expect(gl_auth.find_with_user_password(username, password)).not_to eql user
+ expect(gl_auth.find_with_user_password(username, user.password)).not_to eql user
end
it 'does not find user in blocked_pending_approval state' do
user.block_pending_approval
- expect(gl_auth.find_with_user_password(username, password)).not_to eql user
+ expect(gl_auth.find_with_user_password(username, user.password)).not_to eql user
end
context 'with increment_failed_attempts' do
@@ -917,7 +906,7 @@ RSpec.describe Gitlab::Auth, :use_clean_rails_memory_store_caching do
user.save!
expect do
- gl_auth.find_with_user_password(username, password, increment_failed_attempts: true)
+ gl_auth.find_with_user_password(username, user.password, increment_failed_attempts: true)
user.reload
end.to change(user, :failed_attempts).from(2).to(0)
end
@@ -946,7 +935,7 @@ RSpec.describe Gitlab::Auth, :use_clean_rails_memory_store_caching do
user.save!
expect do
- gl_auth.find_with_user_password(username, password, increment_failed_attempts: true)
+ gl_auth.find_with_user_password(username, user.password, increment_failed_attempts: true)
user.reload
end.not_to change(user, :failed_attempts)
end
@@ -961,7 +950,7 @@ RSpec.describe Gitlab::Auth, :use_clean_rails_memory_store_caching do
it "tries to autheticate with db before ldap" do
expect(Gitlab::Auth::Ldap::Authentication).not_to receive(:login)
- expect(gl_auth.find_with_user_password(username, password)).to eq(user)
+ expect(gl_auth.find_with_user_password(username, user.password)).to eq(user)
end
it "does not find user by using ldap as fallback to for authentication" do
@@ -983,7 +972,7 @@ RSpec.describe Gitlab::Auth, :use_clean_rails_memory_store_caching do
end
it "does not find user by valid login/password" do
- expect(gl_auth.find_with_user_password(username, password)).to be_nil
+ expect(gl_auth.find_with_user_password(username, user.password)).to be_nil
end
context "with ldap enabled" do
@@ -992,7 +981,7 @@ RSpec.describe Gitlab::Auth, :use_clean_rails_memory_store_caching do
end
it "does not find non-ldap user by valid login/password" do
- expect(gl_auth.find_with_user_password(username, password)).to be_nil
+ expect(gl_auth.find_with_user_password(username, user.password)).to be_nil
end
end
end
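Throughout this file, literal passwords such as 'my-secret' are replaced by whatever the :user factory generates, so every expectation reads the credential back via user.password. The resulting pattern, as a sketch (a wrong password simply does not return the user):

  user = create(:user)   # the factory assigns a password satisfying current policy
  Gitlab::Auth.find_with_user_password(user.username, user.password)          # => user
  Gitlab::Auth.find_with_user_password(user.username, 'incorrect_password')   # not the user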
diff --git a/spec/lib/gitlab/background_migration/backfill_ci_namespace_mirrors_spec.rb b/spec/lib/gitlab/background_migration/backfill_ci_namespace_mirrors_spec.rb
deleted file mode 100644
index 8980a26932b..00000000000
--- a/spec/lib/gitlab/background_migration/backfill_ci_namespace_mirrors_spec.rb
+++ /dev/null
@@ -1,45 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::BackgroundMigration::BackfillCiNamespaceMirrors, :migration, schema: 20211208122200 do
- let(:namespaces) { table(:namespaces) }
- let(:ci_namespace_mirrors) { table(:ci_namespace_mirrors) }
-
- subject { described_class.new }
-
- describe '#perform' do
- it 'creates hierarchies for all namespaces in range' do
- namespaces.create!(id: 5, name: 'test1', path: 'test1')
- namespaces.create!(id: 7, name: 'test2', path: 'test2')
- namespaces.create!(id: 8, name: 'test3', path: 'test3')
-
- subject.perform(5, 7)
-
- expect(ci_namespace_mirrors.all).to contain_exactly(
- an_object_having_attributes(namespace_id: 5, traversal_ids: [5]),
- an_object_having_attributes(namespace_id: 7, traversal_ids: [7])
- )
- end
-
- it 'handles existing hierarchies gracefully' do
- namespaces.create!(id: 5, name: 'test1', path: 'test1')
- test2 = namespaces.create!(id: 7, name: 'test2', path: 'test2')
- namespaces.create!(id: 8, name: 'test3', path: 'test3', parent_id: 7)
- namespaces.create!(id: 9, name: 'test4', path: 'test4')
-
- # Simulate a situation where a user has had a chance to move a group to another parent
- # before the background migration has had a chance to run
- test2.update!(parent_id: 5)
- ci_namespace_mirrors.create!(namespace_id: test2.id, traversal_ids: [5, 7])
-
- subject.perform(5, 8)
-
- expect(ci_namespace_mirrors.all).to contain_exactly(
- an_object_having_attributes(namespace_id: 5, traversal_ids: [5]),
- an_object_having_attributes(namespace_id: 7, traversal_ids: [5, 7]),
- an_object_having_attributes(namespace_id: 8, traversal_ids: [5, 7, 8])
- )
- end
- end
-end
diff --git a/spec/lib/gitlab/background_migration/backfill_ci_project_mirrors_spec.rb b/spec/lib/gitlab/background_migration/backfill_ci_project_mirrors_spec.rb
deleted file mode 100644
index 4eec83879e3..00000000000
--- a/spec/lib/gitlab/background_migration/backfill_ci_project_mirrors_spec.rb
+++ /dev/null
@@ -1,46 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::BackgroundMigration::BackfillCiProjectMirrors, :migration, schema: 20211208122201 do
- let(:namespaces) { table(:namespaces) }
- let(:projects) { table(:projects) }
- let(:ci_project_mirrors) { table(:ci_project_mirrors) }
-
- subject { described_class.new }
-
- describe '#perform' do
- it 'creates ci_project_mirrors for all projects in range' do
- namespaces.create!(id: 10, name: 'namespace1', path: 'namespace1')
- projects.create!(id: 5, namespace_id: 10, name: 'test1', path: 'test1')
- projects.create!(id: 7, namespace_id: 10, name: 'test2', path: 'test2')
- projects.create!(id: 8, namespace_id: 10, name: 'test3', path: 'test3')
-
- subject.perform(5, 7)
-
- expect(ci_project_mirrors.all).to contain_exactly(
- an_object_having_attributes(project_id: 5, namespace_id: 10),
- an_object_having_attributes(project_id: 7, namespace_id: 10)
- )
- end
-
- it 'handles existing ci_project_mirrors gracefully' do
- namespaces.create!(id: 10, name: 'namespace1', path: 'namespace1')
- namespaces.create!(id: 11, name: 'namespace2', path: 'namespace2', parent_id: 10)
- projects.create!(id: 5, namespace_id: 10, name: 'test1', path: 'test1')
- projects.create!(id: 7, namespace_id: 11, name: 'test2', path: 'test2')
- projects.create!(id: 8, namespace_id: 11, name: 'test3', path: 'test3')
-
- # Simulate a situation where a user has had a chance to move a project to another namespace
- # before the background migration has had a chance to run
- ci_project_mirrors.create!(project_id: 7, namespace_id: 10)
-
- subject.perform(5, 7)
-
- expect(ci_project_mirrors.all).to contain_exactly(
- an_object_having_attributes(project_id: 5, namespace_id: 10),
- an_object_having_attributes(project_id: 7, namespace_id: 10)
- )
- end
- end
-end
diff --git a/spec/lib/gitlab/background_migration/backfill_ci_queuing_tables_spec.rb b/spec/lib/gitlab/background_migration/backfill_ci_queuing_tables_spec.rb
index 1aac5970a77..aaf8c124a83 100644
--- a/spec/lib/gitlab/background_migration/backfill_ci_queuing_tables_spec.rb
+++ b/spec/lib/gitlab/background_migration/backfill_ci_queuing_tables_spec.rb
@@ -2,7 +2,8 @@
require 'spec_helper'
-RSpec.describe Gitlab::BackgroundMigration::BackfillCiQueuingTables, :migration, schema: 20220208115439 do
+RSpec.describe Gitlab::BackgroundMigration::BackfillCiQueuingTables, :migration,
+ :suppress_gitlab_schemas_validate_connection, schema: 20220208115439 do
let(:namespaces) { table(:namespaces) }
let(:projects) { table(:projects) }
let(:ci_cd_settings) { table(:project_ci_cd_settings) }
diff --git a/spec/lib/gitlab/background_migration/backfill_ci_runner_semver_spec.rb b/spec/lib/gitlab/background_migration/backfill_ci_runner_semver_spec.rb
deleted file mode 100644
index 7c78d8b0305..00000000000
--- a/spec/lib/gitlab/background_migration/backfill_ci_runner_semver_spec.rb
+++ /dev/null
@@ -1,54 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::BackgroundMigration::BackfillCiRunnerSemver, :migration, schema: 20220601151900 do
- let(:ci_runners) { table(:ci_runners, database: :ci) }
-
- subject do
- described_class.new(
- start_id: 10,
- end_id: 15,
- batch_table: :ci_runners,
- batch_column: :id,
- sub_batch_size: 10,
- pause_ms: 0,
- connection: Ci::ApplicationRecord.connection)
- end
-
- describe '#perform' do
- it 'populates semver column on all runners in range' do
- ci_runners.create!(id: 10, runner_type: 1, version: %q(HEAD-fd84d97))
- ci_runners.create!(id: 11, runner_type: 1, version: %q(v1.2.3))
- ci_runners.create!(id: 12, runner_type: 1, version: %q(2.1.0))
- ci_runners.create!(id: 13, runner_type: 1, version: %q(11.8.0~beta.935.g7f6d2abc))
- ci_runners.create!(id: 14, runner_type: 1, version: %q(13.2.2/1.1.0))
- ci_runners.create!(id: 15, runner_type: 1, version: %q('14.3.4'))
-
- subject.perform
-
- expect(ci_runners.all).to contain_exactly(
- an_object_having_attributes(id: 10, semver: nil),
- an_object_having_attributes(id: 11, semver: '1.2.3'),
- an_object_having_attributes(id: 12, semver: '2.1.0'),
- an_object_having_attributes(id: 13, semver: '11.8.0'),
- an_object_having_attributes(id: 14, semver: '13.2.2'),
- an_object_having_attributes(id: 15, semver: '14.3.4')
- )
- end
-
- it 'skips runners that already have semver value' do
- ci_runners.create!(id: 10, runner_type: 1, version: %q(1.2.4), semver: '1.2.3')
- ci_runners.create!(id: 11, runner_type: 1, version: %q(1.2.5))
- ci_runners.create!(id: 12, runner_type: 1, version: %q(HEAD), semver: '1.2.4')
-
- subject.perform
-
- expect(ci_runners.all).to contain_exactly(
- an_object_having_attributes(id: 10, semver: '1.2.3'),
- an_object_having_attributes(id: 11, semver: '1.2.5'),
- an_object_having_attributes(id: 12, semver: '1.2.4')
- )
- end
- end
-end
diff --git a/spec/lib/gitlab/background_migration/backfill_group_features_spec.rb b/spec/lib/gitlab/background_migration/backfill_group_features_spec.rb
index d84bc479554..e0be5a785b8 100644
--- a/spec/lib/gitlab/background_migration/backfill_group_features_spec.rb
+++ b/spec/lib/gitlab/background_migration/backfill_group_features_spec.rb
@@ -13,6 +13,7 @@ RSpec.describe Gitlab::BackgroundMigration::BackfillGroupFeatures, :migration, s
batch_column: :id,
sub_batch_size: 10,
pause_ms: 0,
+ job_arguments: [4],
connection: ActiveRecord::Base.connection)
end
@@ -27,7 +28,7 @@ RSpec.describe Gitlab::BackgroundMigration::BackfillGroupFeatures, :migration, s
group_features.create!(id: 1, group_id: 4)
expect(group_features.count).to eq 1
- expect { subject.perform(4) }.to change { group_features.count }.by(2)
+ expect { subject.perform }.to change { group_features.count }.by(2)
expect(group_features.count).to eq 3
expect(group_features.all.pluck(:group_id)).to contain_exactly(1, 3, 4)
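
For context on the hunk above: the batched background migration interface now takes job arguments through the constructor's `job_arguments:` keyword instead of passing them to `#perform`. A minimal usage sketch follows; it assumes the GitLab codebase is loaded (for example in a Rails console), and the `start_id`/`end_id`/`batch_table` values are illustrative rather than taken from this diff.

# Sketch only; keyword arguments mirror the updated spec above.
job = Gitlab::BackgroundMigration::BackfillGroupFeatures.new(
  start_id: 1,
  end_id: 4,
  batch_table: :namespaces,                   # assumed for illustration
  batch_column: :id,
  sub_batch_size: 10,
  pause_ms: 0,
  job_arguments: [4],                         # previously passed as subject.perform(4)
  connection: ActiveRecord::Base.connection
)
job.perform                                   # arguments are now read from the instance
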
diff --git a/spec/lib/gitlab/background_migration/backfill_namespace_id_of_vulnerability_reads_spec.rb b/spec/lib/gitlab/background_migration/backfill_namespace_id_of_vulnerability_reads_spec.rb
new file mode 100644
index 00000000000..564aa3b8c01
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/backfill_namespace_id_of_vulnerability_reads_spec.rb
@@ -0,0 +1,54 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::BackfillNamespaceIdOfVulnerabilityReads, schema: 20220722145845 do
+ let(:namespaces) { table(:namespaces) }
+ let(:projects) { table(:projects) }
+ let(:users) { table(:users) }
+ let(:scanners) { table(:vulnerability_scanners) }
+ let(:vulnerabilities) { table(:vulnerabilities) }
+ let(:vulnerability_reads) { table(:vulnerability_reads) }
+
+ let(:namespace) { namespaces.create!(name: 'user', path: 'user') }
+ let(:project) { projects.create!(namespace_id: namespace.id, project_namespace_id: namespace.id) }
+ let(:user) { users.create!(username: 'john_doe', email: 'johndoe@gitlab.com', projects_limit: 10) }
+ let(:scanner) { scanners.create!(project_id: project.id, external_id: 'external_id', name: 'Test Scanner') }
+ let(:vulnerability) do
+ vulnerabilities.create!(
+ project_id: project.id,
+ author_id: user.id,
+ title: 'test',
+ severity: 1,
+ confidence: 1,
+ report_type: 1
+ )
+ end
+
+ let(:vulnerability_read) do
+ vulnerability_reads.create!(
+ project_id: project.id,
+ vulnerability_id: vulnerability.id,
+ scanner_id: scanner.id,
+ severity: 1,
+ report_type: 1,
+ state: 1,
+ uuid: SecureRandom.uuid
+ )
+ end
+
+ subject(:perform_migration) do
+ described_class.new(start_id: vulnerability_read.vulnerability_id,
+ end_id: vulnerability_read.vulnerability_id,
+ batch_table: :vulnerability_reads,
+ batch_column: :vulnerability_id,
+ sub_batch_size: 1,
+ pause_ms: 0,
+ connection: ActiveRecord::Base.connection)
+ .perform
+ end
+
+ it 'sets the namespace_id of existing record' do
+ expect { perform_migration }.to change { vulnerability_read.reload.namespace_id }.from(nil).to(namespace.id)
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/backfill_project_import_level_spec.rb b/spec/lib/gitlab/background_migration/backfill_project_import_level_spec.rb
new file mode 100644
index 00000000000..ae296483166
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/backfill_project_import_level_spec.rb
@@ -0,0 +1,123 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+# rubocop:disable Layout/HashAlignment
+RSpec.describe Gitlab::BackgroundMigration::BackfillProjectImportLevel do
+ let(:migration) do
+ described_class.new(
+ start_id: table(:namespaces).minimum(:id),
+ end_id: table(:namespaces).maximum(:id),
+ batch_table: :namespaces,
+ batch_column: :id,
+ sub_batch_size: 2,
+ pause_ms: 0,
+ connection: ApplicationRecord.connection
+ )
+ end
+ # rubocop:enable Layout/HashAlignment
+
+ let(:namespaces_table) { table(:namespaces) }
+ let(:namespace_settings_table) { table(:namespace_settings) }
+
+ let!(:user_namespace) do
+ namespaces_table.create!(
+ name: 'user_namespace',
+ path: 'user_namespace',
+ type: 'User',
+ project_creation_level: 100
+ )
+ end
+
+ let!(:group_namespace_nil) do
+ namespaces_table.create!(
+ name: 'group_namespace_nil',
+ path: 'group_namespace_nil',
+ type: 'Group',
+ project_creation_level: nil
+ )
+ end
+
+ let!(:group_namespace_0) do
+ namespaces_table.create!(
+ name: 'group_namespace_0',
+ path: 'group_namespace_0',
+ type: 'Group',
+ project_creation_level: 0
+ )
+ end
+
+ let!(:group_namespace_1) do
+ namespaces_table.create!(
+ name: 'group_namespace_1',
+ path: 'group_namespace_1',
+ type: 'Group',
+ project_creation_level: 1
+ )
+ end
+
+ let!(:group_namespace_2) do
+ namespaces_table.create!(
+ name: 'group_namespace_2',
+ path: 'group_namespace_2',
+ type: 'Group',
+ project_creation_level: 2
+ )
+ end
+
+ let!(:group_namespace_9999) do
+ namespaces_table.create!(
+ name: 'group_namespace_9999',
+ path: 'group_namespace_9999',
+ type: 'Group',
+ project_creation_level: 9999
+ )
+ end
+
+ subject(:perform_migration) { migration.perform }
+
+ before do
+ namespace_settings_table.create!(namespace_id: user_namespace.id)
+ namespace_settings_table.create!(namespace_id: group_namespace_nil.id)
+ namespace_settings_table.create!(namespace_id: group_namespace_0.id)
+ namespace_settings_table.create!(namespace_id: group_namespace_1.id)
+ namespace_settings_table.create!(namespace_id: group_namespace_2.id)
+ namespace_settings_table.create!(namespace_id: group_namespace_9999.id)
+ end
+
+ describe 'Groups' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:namespace_id, :prev_level, :new_level) do
+ lazy { group_namespace_0.id } | ::Gitlab::Access::OWNER | ::Gitlab::Access::NO_ACCESS
+ lazy { group_namespace_1.id } | ::Gitlab::Access::OWNER | ::Gitlab::Access::MAINTAINER
+ lazy { group_namespace_2.id } | ::Gitlab::Access::OWNER | ::Gitlab::Access::DEVELOPER
+ end
+
+ with_them do
+ it 'backfills the correct project_import_level of Group namespaces' do
+ expect { perform_migration }
+ .to change { namespace_settings_table.find_by(namespace_id: namespace_id).project_import_level }
+ .from(prev_level).to(new_level)
+ end
+ end
+
+ it 'does not update `User` namespaces or values outside range' do
+ expect { perform_migration }
+ .not_to change { namespace_settings_table.find_by(namespace_id: user_namespace.id).project_import_level }
+
+ expect { perform_migration }
+ .not_to change { namespace_settings_table.find_by(namespace_id: group_namespace_9999.id).project_import_level }
+ end
+
+ it 'maintains default import_level if creation_level is nil' do
+ project_import_level = namespace_settings_table.find_by(namespace_id: group_namespace_nil.id).project_import_level
+
+ expect { perform_migration }
+ .not_to change { project_import_level }
+
+ expect(project_import_level).to eq(::Gitlab::Access::OWNER)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/backfill_projects_with_coverage_spec.rb b/spec/lib/gitlab/background_migration/backfill_projects_with_coverage_spec.rb
index 49056154744..4a65ecf8c75 100644
--- a/spec/lib/gitlab/background_migration/backfill_projects_with_coverage_spec.rb
+++ b/spec/lib/gitlab/background_migration/backfill_projects_with_coverage_spec.rb
@@ -2,7 +2,8 @@
require 'spec_helper'
-RSpec.describe Gitlab::BackgroundMigration::BackfillProjectsWithCoverage, schema: 20210818185845 do
+RSpec.describe Gitlab::BackgroundMigration::BackfillProjectsWithCoverage,
+ :suppress_gitlab_schemas_validate_connection, schema: 20210818185845 do
let(:projects) { table(:projects) }
let(:project_ci_feature_usages) { table(:project_ci_feature_usages) }
let(:ci_pipelines) { table(:ci_pipelines) }
diff --git a/spec/lib/gitlab/background_migration/backfill_snippet_repositories_spec.rb b/spec/lib/gitlab/background_migration/backfill_snippet_repositories_spec.rb
index b5122af5cd4..6f75d3faef3 100644
--- a/spec/lib/gitlab/background_migration/backfill_snippet_repositories_spec.rb
+++ b/spec/lib/gitlab/background_migration/backfill_snippet_repositories_spec.rb
@@ -39,7 +39,7 @@ RSpec.describe Gitlab::BackgroundMigration::BackfillSnippetRepositories, :migrat
let(:file_name) { 'file_name.rb' }
let(:content) { 'content' }
- let(:ids) { snippets.pluck('MIN(id)', 'MAX(id)').first }
+ let(:ids) { snippets.pick('MIN(id)', 'MAX(id)') }
let(:service) { described_class.new }
subject { service.perform(*ids) }
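
For context on the `pluck` to `pick` change above: ActiveRecord's `pick` (Rails 6+) returns the selected values from only the first row, so it is equivalent to `limit(1).pluck(...).first`. A minimal sketch, with `Snippet` standing in for any ActiveRecord model:

# Sketch only; Snippet stands in for any ActiveRecord model.
Snippet.pick('MIN(id)', 'MAX(id)')                  # => e.g. [1, 42]
Snippet.limit(1).pluck('MIN(id)', 'MAX(id)').first  # equivalent result
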
diff --git a/spec/lib/gitlab/background_migration/backfill_vulnerability_reads_cluster_agent_spec.rb b/spec/lib/gitlab/background_migration/backfill_vulnerability_reads_cluster_agent_spec.rb
new file mode 100644
index 00000000000..79699375a8d
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/backfill_vulnerability_reads_cluster_agent_spec.rb
@@ -0,0 +1,93 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::BackfillVulnerabilityReadsClusterAgent, :migration, schema: 20220525221133 do # rubocop:disable Layout/LineLength
+ let(:migration) do
+ described_class.new(start_id: 1, end_id: 10,
+ batch_table: table_name, batch_column: batch_column,
+ sub_batch_size: sub_batch_size, pause_ms: pause_ms,
+ connection: ApplicationRecord.connection)
+ end
+
+ let(:users_table) { table(:users) }
+ let(:vulnerability_reads_table) { table(:vulnerability_reads) }
+ let(:vulnerability_scanners_table) { table(:vulnerability_scanners) }
+ let(:vulnerabilities_table) { table(:vulnerabilities) }
+ let(:namespaces_table) { table(:namespaces) }
+ let(:projects_table) { table(:projects) }
+ let(:cluster_agents_table) { table(:cluster_agents) }
+
+ let(:table_name) { 'vulnerability_reads' }
+ let(:batch_column) { :id }
+ let(:sub_batch_size) { 1_000 }
+ let(:pause_ms) { 0 }
+
+ subject(:perform_migration) { migration.perform }
+
+ before do
+ users_table.create!(id: 1, name: 'John Doe', email: 'test@example.com', projects_limit: 5)
+
+ namespaces_table.create!(id: 1, name: 'Namespace 1', path: 'namespace-1')
+ namespaces_table.create!(id: 2, name: 'Namespace 2', path: 'namespace-2')
+
+ projects_table.create!(id: 1, namespace_id: 1, name: 'Project 1', path: 'project-1', project_namespace_id: 1)
+ projects_table.create!(id: 2, namespace_id: 2, name: 'Project 2', path: 'project-2', project_namespace_id: 2)
+
+ cluster_agents_table.create!(id: 1, name: 'Agent 1', project_id: 1)
+ cluster_agents_table.create!(id: 2, name: 'Agent 2', project_id: 2)
+
+ vulnerability_scanners_table.create!(id: 1, project_id: 1, external_id: 'starboard', name: 'Starboard')
+ vulnerability_scanners_table.create!(id: 2, project_id: 2, external_id: 'starboard', name: 'Starboard')
+
+ add_vulnerability_read!(1, project_id: 1, cluster_agent_id: 1, report_type: 7)
+ add_vulnerability_read!(3, project_id: 1, cluster_agent_id: 2, report_type: 7)
+ add_vulnerability_read!(5, project_id: 2, cluster_agent_id: 2, report_type: 5)
+ add_vulnerability_read!(7, project_id: 2, cluster_agent_id: 3, report_type: 7)
+ add_vulnerability_read!(9, project_id: 2, cluster_agent_id: 2, report_type: 7)
+ add_vulnerability_read!(10, project_id: 1, cluster_agent_id: 1, report_type: 7)
+ add_vulnerability_read!(11, project_id: 1, cluster_agent_id: 1, report_type: 7)
+ end
+
+ it 'backfills `casted_cluster_agent_id` for the selected records', :aggregate_failures do
+ queries = ActiveRecord::QueryRecorder.new do
+ perform_migration
+ end
+
+ expect(queries.count).to eq(3)
+ expect(vulnerability_reads_table.where.not(casted_cluster_agent_id: nil).count).to eq 3
+ expect(vulnerability_reads_table.where.not(casted_cluster_agent_id: nil).pluck(:id)).to match_array([1, 9, 10])
+ end
+
+ it 'tracks timings of queries' do
+ expect(migration.batch_metrics.timings).to be_empty
+
+ expect { perform_migration }.to change { migration.batch_metrics.timings }
+ end
+
+ private
+
+ def add_vulnerability_read!(id, project_id:, cluster_agent_id:, report_type:)
+ vulnerabilities_table.create!(
+ id: id,
+ project_id: project_id,
+ author_id: 1,
+ title: "Vulnerability #{id}",
+ severity: 5,
+ confidence: 5,
+ report_type: report_type
+ )
+
+ vulnerability_reads_table.create!(
+ id: id,
+ uuid: SecureRandom.uuid,
+ severity: 5,
+ state: 1,
+ vulnerability_id: id,
+ scanner_id: project_id,
+ cluster_agent_id: cluster_agent_id.to_s,
+ project_id: project_id,
+ report_type: report_type
+ )
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/batched_migration_job_spec.rb b/spec/lib/gitlab/background_migration/batched_migration_job_spec.rb
index 98866bb765f..f03f90ddbbb 100644
--- a/spec/lib/gitlab/background_migration/batched_migration_job_spec.rb
+++ b/spec/lib/gitlab/background_migration/batched_migration_job_spec.rb
@@ -3,6 +3,113 @@
require 'spec_helper'
RSpec.describe Gitlab::BackgroundMigration::BatchedMigrationJob do
+ let(:connection) { Gitlab::Database.database_base_models[:main].connection }
+
+ describe '.generic_instance' do
+ it 'defines generic instance with only some of the attributes set' do
+ generic_instance = described_class.generic_instance(
+ batch_table: 'projects', batch_column: 'id',
+ job_arguments: %w(x y), connection: connection
+ )
+
+ expect(generic_instance.send(:batch_table)).to eq('projects')
+ expect(generic_instance.send(:batch_column)).to eq('id')
+ expect(generic_instance.instance_variable_get('@job_arguments')).to eq(%w(x y))
+ expect(generic_instance.send(:connection)).to eq(connection)
+
+ %i(start_id end_id sub_batch_size pause_ms).each do |attr|
+ expect(generic_instance.send(attr)).to eq(0)
+ end
+ end
+ end
+
+ describe '.job_arguments' do
+ let(:job_class) do
+ Class.new(described_class) do
+ job_arguments :value_a, :value_b
+ end
+ end
+
+ subject(:job_instance) do
+ job_class.new(start_id: 1, end_id: 10,
+ batch_table: '_test_table',
+ batch_column: 'id',
+ sub_batch_size: 2,
+ pause_ms: 1000,
+ job_arguments: %w(a b),
+ connection: connection)
+ end
+
+ it 'defines methods' do
+ expect(job_instance.value_a).to eq('a')
+ expect(job_instance.value_b).to eq('b')
+ expect(job_class.job_arguments_count).to eq(2)
+ end
+
+ context 'when no job arguments are defined' do
+ let(:job_class) do
+ Class.new(described_class)
+ end
+
+ it 'job_arguments_count is 0' do
+ expect(job_class.job_arguments_count).to eq(0)
+ end
+ end
+ end
+
+ describe '.scope_to' do
+ subject(:job_instance) do
+ job_class.new(start_id: 1, end_id: 10,
+ batch_table: '_test_table',
+ batch_column: 'id',
+ sub_batch_size: 2,
+ pause_ms: 1000,
+ job_arguments: %w(a b),
+ connection: connection)
+ end
+
+ context 'when additional scoping is defined' do
+ let(:job_class) do
+ Class.new(described_class) do
+ job_arguments :value_a, :value_b
+ scope_to ->(r) { "#{r}-#{value_a}-#{value_b}".upcase }
+ end
+ end
+
+ it 'applies additional scope to the provided relation' do
+ expect(job_instance.filter_batch('relation')).to eq('RELATION-A-B')
+ end
+ end
+
+ context 'when there is no additional scoping defined' do
+ let(:job_class) do
+ Class.new(described_class) do
+ end
+ end
+
+ it 'returns provided relation as is' do
+ expect(job_instance.filter_batch('relation')).to eq('relation')
+ end
+ end
+ end
+
+ describe 'descendants', :eager_load do
+ it 'have the same method signature for #perform' do
+ expected_arity = described_class.instance_method(:perform).arity
+ offences = described_class.descendants.select { |klass| klass.instance_method(:perform).arity != expected_arity }
+
+ expect(offences).to be_empty, "expected no descendants of #{described_class} to accept arguments for " \
+ "'#perform', but some do: #{offences.join(", ")}"
+ end
+
+ it 'do not use .batching_scope' do
+ offences = described_class.descendants.select { |klass| klass.respond_to?(:batching_scope) }
+
+ expect(offences).to be_empty, "expected no descendants of #{described_class} to define '.batching_scope', " \
+ "but some do: #{offences.join(", ")}"
+ end
+ end
+
describe '#perform' do
let(:connection) { Gitlab::Database.database_base_models[:main].connection }
@@ -66,6 +173,30 @@ RSpec.describe Gitlab::BackgroundMigration::BatchedMigrationJob do
expect(test_table.order(:id).pluck(:to_column)).to contain_exactly(5, 10, nil, 20)
end
+ context 'with additional scoping' do
+ let(:job_class) do
+ Class.new(described_class) do
+ scope_to ->(r) { r.where('mod(id, 2) = 0') }
+
+ def perform(*job_arguments)
+ each_sub_batch(
+ operation_name: :update,
+ batching_arguments: { order_hint: :updated_at },
+ batching_scope: -> (relation) { relation.where.not(bar: nil) }
+ ) do |sub_batch|
+ sub_batch.update_all('to_column = from_column')
+ end
+ end
+ end
+ end
+
+ it 'respects #filter_batch' do
+ expect { perform_job }.to change { test_table.where(to_column: nil).count }.from(4).to(2)
+
+ expect(test_table.order(:id).pluck(:to_column)).to contain_exactly(nil, 10, nil, 20)
+ end
+ end
+
it 'instruments the batch operation' do
expect(job_instance.batch_metrics.affected_rows).to be_empty
@@ -84,7 +215,7 @@ RSpec.describe Gitlab::BackgroundMigration::BatchedMigrationJob do
context 'when batching_arguments are given' do
it 'forwards them for batching' do
- expect(job_instance).to receive(:parent_batch_relation).and_return(test_table)
+ expect(job_instance).to receive(:base_relation).and_return(test_table)
expect(test_table).to receive(:each_batch).with(column: 'id', of: 2, order_hint: :updated_at)
@@ -155,6 +286,24 @@ RSpec.describe Gitlab::BackgroundMigration::BatchedMigrationJob do
expect(job_instance.batch_metrics.affected_rows[:insert]).to contain_exactly(2, 1)
end
+
+ context 'when used in combination with scope_to' do
+ let(:job_class) do
+ Class.new(described_class) do
+ scope_to ->(r) { r.where.not(from_column: 10) }
+
+ def perform(*job_arguments)
+ distinct_each_batch(operation_name: :insert) do |sub_batch|
+ end
+ end
+ end
+ end
+
+ it 'raises an error' do
+ expect { perform_job }.to raise_error RuntimeError,
+ /distinct_each_batch can not be used when additional filters are defined with scope_to/
+ end
+ end
end
end
end
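
For context on the examples added above: they exercise the class-level DSL of `BatchedMigrationJob`. `job_arguments :value_a, :value_b` defines reader methods for the positional job arguments, `scope_to` registers an extra filter that `#filter_batch` applies to the batch relation, and `distinct_each_batch` raises when such a filter is defined. A minimal sketch of a subclass combining these pieces; the class name, table, and column names are placeholders, not taken from the diff.

# Sketch only; assumes the GitLab codebase is loaded.
class CopyNonNullValues < Gitlab::BackgroundMigration::BatchedMigrationJob
  job_arguments :source_column, :target_column   # defines #source_column / #target_column readers
  scope_to ->(relation) { relation.where.not(source_column => nil) }

  def perform(*job_arguments)
    each_sub_batch(operation_name: :update) do |sub_batch|
      sub_batch.update_all("#{target_column} = #{source_column}")
    end
    # distinct_each_batch would raise here, because an additional
    # scope_to filter is defined (see the added example above).
  end
end
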
diff --git a/spec/lib/gitlab/background_migration/batching_strategies/primary_key_batching_strategy_spec.rb b/spec/lib/gitlab/background_migration/batching_strategies/primary_key_batching_strategy_spec.rb
index 943b5744b64..9fdd7bf8adc 100644
--- a/spec/lib/gitlab/background_migration/batching_strategies/primary_key_batching_strategy_spec.rb
+++ b/spec/lib/gitlab/background_migration/batching_strategies/primary_key_batching_strategy_spec.rb
@@ -45,19 +45,16 @@ RSpec.describe Gitlab::BackgroundMigration::BatchingStrategies::PrimaryKeyBatchi
end
end
- context 'when job_class is provided with a batching_scope' do
+ context 'when job class supports batch scope DSL' do
let(:job_class) do
- Class.new(described_class) do
- def self.batching_scope(relation, job_arguments:)
- min_id = job_arguments.first
-
- relation.where.not(type: 'Project').where('id >= ?', min_id)
- end
+ Class.new(Gitlab::BackgroundMigration::BatchedMigrationJob) do
+ job_arguments :min_id
+ scope_to ->(r) { r.where.not(type: 'Project').where('id >= ?', min_id) }
end
end
- it 'applies the batching scope' do
- expect(job_class).to receive(:batching_scope).and_call_original
+ it 'applies the additional scope' do
+ expect(job_class).to receive(:generic_instance).and_call_original
batch_bounds = batching_strategy.next_batch(:namespaces, :id, batch_min_value: namespace4.id, batch_size: 3, job_arguments: [1], job_class: job_class)
diff --git a/spec/lib/gitlab/background_migration/copy_ci_builds_columns_to_security_scans_spec.rb b/spec/lib/gitlab/background_migration/copy_ci_builds_columns_to_security_scans_spec.rb
deleted file mode 100644
index db822f36c21..00000000000
--- a/spec/lib/gitlab/background_migration/copy_ci_builds_columns_to_security_scans_spec.rb
+++ /dev/null
@@ -1,51 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::BackgroundMigration::CopyCiBuildsColumnsToSecurityScans, schema: 20210728174349 do
- let(:migration) { described_class.new }
-
- let_it_be(:namespaces) { table(:namespaces) }
- let_it_be(:projects) { table(:projects) }
- let_it_be(:ci_pipelines) { table(:ci_pipelines) }
- let_it_be(:ci_builds) { table(:ci_builds) }
- let_it_be(:security_scans) { table(:security_scans) }
-
- let!(:namespace) { namespaces.create!(name: 'namespace', path: 'namespace') }
- let!(:project1) { projects.create!(namespace_id: namespace.id) }
- let!(:project2) { projects.create!(namespace_id: namespace.id) }
- let!(:pipeline1) { ci_pipelines.create!(status: "success")}
- let!(:pipeline2) { ci_pipelines.create!(status: "success")}
-
- let!(:build1) { ci_builds.create!(commit_id: pipeline1.id, type: 'Ci::Build', project_id: project1.id) }
- let!(:build2) { ci_builds.create!(commit_id: pipeline2.id, type: 'Ci::Build', project_id: project2.id) }
- let!(:build3) { ci_builds.create!(commit_id: pipeline1.id, type: 'Ci::Build', project_id: project1.id) }
-
- let!(:scan1) { security_scans.create!(build_id: build1.id, scan_type: 1) }
- let!(:scan2) { security_scans.create!(build_id: build2.id, scan_type: 1) }
- let!(:scan3) { security_scans.create!(build_id: build3.id, scan_type: 1) }
-
- subject { migration.perform(scan1.id, scan2.id) }
-
- before do
- stub_const("#{described_class}::UPDATE_BATCH_SIZE", 2)
- end
-
- it 'copies `project_id`, `commit_id` from `ci_builds` to `security_scans`', :aggregate_failures do
- expect(migration).to receive(:mark_job_as_succeeded).with(scan1.id, scan2.id)
-
- subject
-
- scan1.reload
- expect(scan1.project_id).to eq(project1.id)
- expect(scan1.pipeline_id).to eq(pipeline1.id)
-
- scan2.reload
- expect(scan2.project_id).to eq(project2.id)
- expect(scan2.pipeline_id).to eq(pipeline2.id)
-
- scan3.reload
- expect(scan3.project_id).to be_nil
- expect(scan3.pipeline_id).to be_nil
- end
-end
diff --git a/spec/lib/gitlab/background_migration/copy_column_using_background_migration_job_spec.rb b/spec/lib/gitlab/background_migration/copy_column_using_background_migration_job_spec.rb
index 78bd1afd8d2..9c33100a0b3 100644
--- a/spec/lib/gitlab/background_migration/copy_column_using_background_migration_job_spec.rb
+++ b/spec/lib/gitlab/background_migration/copy_column_using_background_migration_job_spec.rb
@@ -16,6 +16,7 @@ RSpec.describe Gitlab::BackgroundMigration::CopyColumnUsingBackgroundMigrationJo
ActiveRecord::Migration.new.extend(Gitlab::Database::MigrationHelpers)
end
+ let(:job_arguments) { %w(name name_convert_to_text) }
let(:copy_job) do
described_class.new(start_id: 12,
end_id: 20,
@@ -23,6 +24,7 @@ RSpec.describe Gitlab::BackgroundMigration::CopyColumnUsingBackgroundMigrationJo
batch_column: 'id',
sub_batch_size: sub_batch_size,
pause_ms: pause_ms,
+ job_arguments: job_arguments,
connection: connection)
end
@@ -53,32 +55,42 @@ RSpec.describe Gitlab::BackgroundMigration::CopyColumnUsingBackgroundMigrationJo
SQL
end
- it 'copies all primary keys in range' do
- temporary_column = helpers.convert_to_bigint_column(:id)
+ context 'primary keys' do
+ let(:temporary_column) { helpers.convert_to_bigint_column(:id) }
+ let(:job_arguments) { ['id', temporary_column] }
- copy_job.perform('id', temporary_column)
+ it 'copies all in range' do
+ copy_job.perform
- expect(test_table.count).to eq(4)
- expect(test_table.where("id = #{temporary_column}").pluck(:id)).to contain_exactly(12, 15, 19)
- expect(test_table.where(temporary_column => 0).pluck(:id)).to contain_exactly(11)
+ expect(test_table.count).to eq(4)
+ expect(test_table.where("id = #{temporary_column}").pluck(:id)).to contain_exactly(12, 15, 19)
+ expect(test_table.where(temporary_column => 0).pluck(:id)).to contain_exactly(11)
+ end
end
- it 'copies all foreign keys in range' do
- temporary_column = helpers.convert_to_bigint_column(:fk)
+ context 'foreign keys' do
+ let(:temporary_column) { helpers.convert_to_bigint_column(:fk) }
+ let(:job_arguments) { ['fk', temporary_column] }
- copy_job.perform('fk', temporary_column)
+ it 'copies all in range' do
+ copy_job.perform
- expect(test_table.count).to eq(4)
- expect(test_table.where("fk = #{temporary_column}").pluck(:id)).to contain_exactly(12, 15, 19)
- expect(test_table.where(temporary_column => 0).pluck(:id)).to contain_exactly(11)
+ expect(test_table.count).to eq(4)
+ expect(test_table.where("fk = #{temporary_column}").pluck(:id)).to contain_exactly(12, 15, 19)
+ expect(test_table.where(temporary_column => 0).pluck(:id)).to contain_exactly(11)
+ end
end
- it 'copies columns with NULLs' do
- expect { copy_job.perform('name', 'name_convert_to_text') }
- .to change { test_table.where("name_convert_to_text = 'no name'").count }.from(4).to(1)
+ context 'columns with NULLs' do
+ let(:job_arguments) { %w(name name_convert_to_text) }
- expect(test_table.where('name = name_convert_to_text').pluck(:id)).to contain_exactly(12, 19)
- expect(test_table.where('name is NULL and name_convert_to_text is NULL').pluck(:id)).to contain_exactly(15)
+ it 'copies all in range' do
+ expect { copy_job.perform }
+ .to change { test_table.where("name_convert_to_text = 'no name'").count }.from(4).to(1)
+
+ expect(test_table.where('name = name_convert_to_text').pluck(:id)).to contain_exactly(12, 19)
+ expect(test_table.where('name is NULL and name_convert_to_text is NULL').pluck(:id)).to contain_exactly(15)
+ end
end
context 'when multiple columns are given' do
@@ -87,8 +99,10 @@ RSpec.describe Gitlab::BackgroundMigration::CopyColumnUsingBackgroundMigrationJo
let(:columns_to_copy_from) { %w[id fk] }
let(:columns_to_copy_to) { [id_tmp_column, fk_tmp_column] }
+ let(:job_arguments) { [columns_to_copy_from, columns_to_copy_to] }
+
it 'copies all values in the range' do
- copy_job.perform(columns_to_copy_from, columns_to_copy_to)
+ copy_job.perform
expect(test_table.count).to eq(4)
expect(test_table.where("id = #{id_tmp_column} AND fk = #{fk_tmp_column}").pluck(:id)).to contain_exactly(12, 15, 19)
@@ -100,7 +114,7 @@ RSpec.describe Gitlab::BackgroundMigration::CopyColumnUsingBackgroundMigrationJo
it 'raises an error' do
expect do
- copy_job.perform(columns_to_copy_from, columns_to_copy_to)
+ copy_job.perform
end.to raise_error(ArgumentError, 'number of source and destination columns must match')
end
end
@@ -109,7 +123,7 @@ RSpec.describe Gitlab::BackgroundMigration::CopyColumnUsingBackgroundMigrationJo
it 'tracks timings of queries' do
expect(copy_job.batch_metrics.timings).to be_empty
- copy_job.perform('name', 'name_convert_to_text')
+ copy_job.perform
expect(copy_job.batch_metrics.timings[:update_all]).not_to be_empty
end
@@ -120,7 +134,7 @@ RSpec.describe Gitlab::BackgroundMigration::CopyColumnUsingBackgroundMigrationJo
it 'sleeps for the specified time between sub-batches' do
expect(copy_job).to receive(:sleep).with(0.005)
- copy_job.perform('name', 'name_convert_to_text')
+ copy_job.perform
end
context 'when pause_ms value is negative' do
@@ -129,7 +143,7 @@ RSpec.describe Gitlab::BackgroundMigration::CopyColumnUsingBackgroundMigrationJo
it 'treats it as a 0' do
expect(copy_job).to receive(:sleep).with(0)
- copy_job.perform('name', 'name_convert_to_text')
+ copy_job.perform
end
end
end
diff --git a/spec/lib/gitlab/background_migration/disable_legacy_open_source_license_for_no_issues_no_repo_projects_spec.rb b/spec/lib/gitlab/background_migration/disable_legacy_open_source_license_for_no_issues_no_repo_projects_spec.rb
new file mode 100644
index 00000000000..d20eaef3650
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/disable_legacy_open_source_license_for_no_issues_no_repo_projects_spec.rb
@@ -0,0 +1,62 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::DisableLegacyOpenSourceLicenseForNoIssuesNoRepoProjects,
+ :migration,
+ schema: 20220722084543 do
+ let(:namespaces_table) { table(:namespaces) }
+ let(:projects_table) { table(:projects) }
+ let(:project_settings_table) { table(:project_settings) }
+ let(:project_statistics_table) { table(:project_statistics) }
+ let(:issues_table) { table(:issues) }
+
+ subject(:perform_migration) do
+ described_class.new(start_id: projects_table.minimum(:id),
+ end_id: projects_table.maximum(:id),
+ batch_table: :projects,
+ batch_column: :id,
+ sub_batch_size: 2,
+ pause_ms: 0,
+ connection: ActiveRecord::Base.connection)
+ .perform
+ end
+
+ it 'sets `legacy_open_source_license_available` to false only for public projects with no issues and no repo',
+ :aggregate_failures do
+ project_with_no_issues_no_repo = create_legacy_license_public_project('project-with-no-issues-no-repo')
+ project_with_repo = create_legacy_license_public_project('project-with-repo', repo_size: 1)
+ project_with_issues = create_legacy_license_public_project('project-with-issues', with_issue: true)
+ project_with_issues_and_repo =
+ create_legacy_license_public_project('project-with-issues-and-repo', repo_size: 1, with_issue: true)
+
+ queries = ActiveRecord::QueryRecorder.new { perform_migration }
+
+ expect(queries.count).to eq(7)
+ expect(migrated_attribute(project_with_no_issues_no_repo)).to be_falsey
+ expect(migrated_attribute(project_with_repo)).to be_truthy
+ expect(migrated_attribute(project_with_issues)).to be_truthy
+ expect(migrated_attribute(project_with_issues_and_repo)).to be_truthy
+ end
+
+ def create_legacy_license_public_project(path, repo_size: 0, with_issue: false)
+ namespace = namespaces_table.create!(name: "namespace-#{path}", path: "namespace-#{path}")
+ project_namespace =
+ namespaces_table.create!(name: "-project-namespace-#{path}", path: "project-namespace-#{path}", type: 'Project')
+ project = projects_table
+ .create!(
+ name: path, path: path, namespace_id: namespace.id,
+ project_namespace_id: project_namespace.id, visibility_level: 20
+ )
+
+ project_statistics_table.create!(project_id: project.id, namespace_id: namespace.id, repository_size: repo_size)
+ issues_table.create!(project_id: project.id) if with_issue
+ project_settings_table.create!(project_id: project.id, legacy_open_source_license_available: true)
+
+ project
+ end
+
+ def migrated_attribute(project)
+ project_settings_table.find(project.id).legacy_open_source_license_available
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/disable_legacy_open_source_license_for_one_member_no_repo_projects_spec.rb b/spec/lib/gitlab/background_migration/disable_legacy_open_source_license_for_one_member_no_repo_projects_spec.rb
new file mode 100644
index 00000000000..0dba1d7c8a2
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/disable_legacy_open_source_license_for_one_member_no_repo_projects_spec.rb
@@ -0,0 +1,66 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::DisableLegacyOpenSourceLicenseForOneMemberNoRepoProjects,
+ :migration,
+ schema: 20220721031446 do
+ let(:namespaces_table) { table(:namespaces) }
+ let(:projects_table) { table(:projects) }
+ let(:project_settings_table) { table(:project_settings) }
+ let(:project_statistics_table) { table(:project_statistics) }
+ let(:users_table) { table(:users) }
+ let(:project_authorizations_table) { table(:project_authorizations) }
+
+ subject(:perform_migration) do
+ described_class.new(start_id: projects_table.minimum(:id),
+ end_id: projects_table.maximum(:id),
+ batch_table: :projects,
+ batch_column: :id,
+ sub_batch_size: 2,
+ pause_ms: 0,
+ connection: ActiveRecord::Base.connection)
+ .perform
+ end
+
+ it 'sets `legacy_open_source_license_available` to false only for public projects with 1 member and no repo',
+ :aggregate_failures do
+ project_with_no_repo_one_member = create_legacy_license_public_project('project-with-one-member-no-repo')
+ project_with_repo_one_member = create_legacy_license_public_project('project-with-repo', repo_size: 1)
+ project_with_no_repo_two_members = create_legacy_license_public_project('project-with-two-members', members: 2)
+ project_with_repo_two_members =
+ create_legacy_license_public_project('project-with-repo', repo_size: 1, members: 2)
+
+ queries = ActiveRecord::QueryRecorder.new { perform_migration }
+
+ expect(queries.count).to eq(7)
+ expect(migrated_attribute(project_with_no_repo_one_member)).to be_falsey
+ expect(migrated_attribute(project_with_repo_one_member)).to be_truthy
+ expect(migrated_attribute(project_with_no_repo_two_members)).to be_truthy
+ expect(migrated_attribute(project_with_repo_two_members)).to be_truthy
+ end
+
+ def create_legacy_license_public_project(path, repo_size: 0, members: 1)
+ namespace = namespaces_table.create!(name: "namespace-#{path}", path: "namespace-#{path}")
+ project_namespace =
+ namespaces_table.create!(name: "-project-namespace-#{path}", path: "project-namespace-#{path}", type: 'Project')
+ project = projects_table
+ .create!(
+ name: path, path: path, namespace_id: namespace.id,
+ project_namespace_id: project_namespace.id, visibility_level: 20
+ )
+
+ members.times do |member_id|
+ user = users_table.create!(email: "user#{member_id}-project-#{project.id}@gitlab.com", projects_limit: 100)
+ project_authorizations_table.create!(project_id: project.id, user_id: user.id, access_level: 50)
+ end
+ project_statistics_table.create!(project_id: project.id, namespace_id: namespace.id, repository_size: repo_size)
+ project_settings_table.create!(project_id: project.id, legacy_open_source_license_available: true)
+
+ project
+ end
+
+ def migrated_attribute(project)
+ project_settings_table.find(project.id).legacy_open_source_license_available
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/drop_invalid_security_findings_spec.rb b/spec/lib/gitlab/background_migration/drop_invalid_security_findings_spec.rb
index 7cc64889fc8..5fdd8683d06 100644
--- a/spec/lib/gitlab/background_migration/drop_invalid_security_findings_spec.rb
+++ b/spec/lib/gitlab/background_migration/drop_invalid_security_findings_spec.rb
@@ -1,7 +1,8 @@
# frozen_string_literal: true
require 'spec_helper'
-RSpec.describe Gitlab::BackgroundMigration::DropInvalidSecurityFindings, schema: 20211108211434 do
+RSpec.describe Gitlab::BackgroundMigration::DropInvalidSecurityFindings, :suppress_gitlab_schemas_validate_connection,
+ schema: 20211108211434 do
let(:namespace) { table(:namespaces).create!(name: 'user', path: 'user', type: Namespaces::UserNamespace.sti_name) }
let(:project) { table(:projects).create!(namespace_id: namespace.id) }
diff --git a/spec/lib/gitlab/background_migration/extract_project_topics_into_separate_table_spec.rb b/spec/lib/gitlab/background_migration/extract_project_topics_into_separate_table_spec.rb
index 65d55f85a98..51a09d50a19 100644
--- a/spec/lib/gitlab/background_migration/extract_project_topics_into_separate_table_spec.rb
+++ b/spec/lib/gitlab/background_migration/extract_project_topics_into_separate_table_spec.rb
@@ -2,7 +2,8 @@
require 'spec_helper'
-RSpec.describe Gitlab::BackgroundMigration::ExtractProjectTopicsIntoSeparateTable, schema: 20210730104800 do
+RSpec.describe Gitlab::BackgroundMigration::ExtractProjectTopicsIntoSeparateTable,
+ :suppress_gitlab_schemas_validate_connection, schema: 20210730104800 do
it 'correctly extracts project topics into separate table' do
namespaces = table(:namespaces)
projects = table(:projects)
diff --git a/spec/lib/gitlab/background_migration/migrate_project_taggings_context_from_tags_to_topics_spec.rb b/spec/lib/gitlab/background_migration/migrate_project_taggings_context_from_tags_to_topics_spec.rb
index 5e2f32c54be..5495d786a48 100644
--- a/spec/lib/gitlab/background_migration/migrate_project_taggings_context_from_tags_to_topics_spec.rb
+++ b/spec/lib/gitlab/background_migration/migrate_project_taggings_context_from_tags_to_topics_spec.rb
@@ -2,7 +2,8 @@
require 'spec_helper'
-RSpec.describe Gitlab::BackgroundMigration::MigrateProjectTaggingsContextFromTagsToTopics, schema: 20210511095658 do
+RSpec.describe Gitlab::BackgroundMigration::MigrateProjectTaggingsContextFromTagsToTopics,
+ :suppress_gitlab_schemas_validate_connection, schema: 20210511095658 do
it 'correctly migrates project taggings context from tags to topics' do
taggings = table(:taggings)
diff --git a/spec/lib/gitlab/background_migration/nullify_orphan_runner_id_on_ci_builds_spec.rb b/spec/lib/gitlab/background_migration/nullify_orphan_runner_id_on_ci_builds_spec.rb
index e38edfc3643..2f0eef3c399 100644
--- a/spec/lib/gitlab/background_migration/nullify_orphan_runner_id_on_ci_builds_spec.rb
+++ b/spec/lib/gitlab/background_migration/nullify_orphan_runner_id_on_ci_builds_spec.rb
@@ -2,12 +2,13 @@
require 'spec_helper'
-RSpec.describe Gitlab::BackgroundMigration::NullifyOrphanRunnerIdOnCiBuilds, :migration, schema: 20220223112304 do
+RSpec.describe Gitlab::BackgroundMigration::NullifyOrphanRunnerIdOnCiBuilds,
+ :suppress_gitlab_schemas_validate_connection, migration: :gitlab_ci, schema: 20220223112304 do
let(:namespaces) { table(:namespaces) }
let(:projects) { table(:projects) }
- let(:ci_runners) { table(:ci_runners, database: :ci) }
- let(:ci_pipelines) { table(:ci_pipelines, database: :ci) }
- let(:ci_builds) { table(:ci_builds, database: :ci) }
+ let(:ci_runners) { table(:ci_runners) }
+ let(:ci_pipelines) { table(:ci_pipelines) }
+ let(:ci_builds) { table(:ci_builds) }
subject { described_class.new }
@@ -20,7 +21,9 @@ RSpec.describe Gitlab::BackgroundMigration::NullifyOrphanRunnerIdOnCiBuilds, :mi
end
after do
- helpers.add_concurrent_foreign_key(:ci_builds, :ci_runners, column: :runner_id, on_delete: :nullify, validate: false)
+ helpers.add_concurrent_foreign_key(
+ :ci_builds, :ci_runners, column: :runner_id, on_delete: :nullify, validate: false
+ )
end
describe '#perform' do
diff --git a/spec/lib/gitlab/background_migration/project_namespaces/backfill_project_namespaces_spec.rb b/spec/lib/gitlab/background_migration/project_namespaces/backfill_project_namespaces_spec.rb
index 2ad561ead87..bff803e2035 100644
--- a/spec/lib/gitlab/background_migration/project_namespaces/backfill_project_namespaces_spec.rb
+++ b/spec/lib/gitlab/background_migration/project_namespaces/backfill_project_namespaces_spec.rb
@@ -5,199 +5,211 @@ require 'spec_helper'
RSpec.describe Gitlab::BackgroundMigration::ProjectNamespaces::BackfillProjectNamespaces, :migration, schema: 20220326161803 do
include MigrationsHelpers
- context 'when migrating data', :aggregate_failures do
- let(:projects) { table(:projects) }
- let(:namespaces) { table(:namespaces) }
+ RSpec.shared_examples 'backfills project namespaces' do
+ context 'when migrating data', :aggregate_failures do
+ let(:projects) { table(:projects) }
+ let(:namespaces) { table(:namespaces) }
- let(:parent_group1) { namespaces.create!(name: 'parent_group1', path: 'parent_group1', visibility_level: 20, type: 'Group') }
- let(:parent_group2) { namespaces.create!(name: 'test1', path: 'test1', runners_token: 'my-token1', project_creation_level: 1, visibility_level: 20, type: 'Group') }
+ let(:parent_group1) { namespaces.create!(name: 'parent_group1', path: 'parent_group1', visibility_level: 20, type: 'Group') }
+ let(:parent_group2) { namespaces.create!(name: 'test1', path: 'test1', runners_token: 'my-token1', project_creation_level: 1, visibility_level: 20, type: 'Group') }
- let(:parent_group1_project) { projects.create!(name: 'parent_group1_project', path: 'parent_group1_project', namespace_id: parent_group1.id, visibility_level: 20) }
- let(:parent_group2_project) { projects.create!(name: 'parent_group2_project', path: 'parent_group2_project', namespace_id: parent_group2.id, visibility_level: 20) }
+ let(:parent_group1_project) { projects.create!(name: 'parent_group1_project', path: 'parent_group1_project', namespace_id: parent_group1.id, visibility_level: 20) }
+ let(:parent_group2_project) { projects.create!(name: 'parent_group2_project', path: 'parent_group2_project', namespace_id: parent_group2.id, visibility_level: 20) }
- let(:child_nodes_count) { 2 }
- let(:tree_depth) { 3 }
+ let(:child_nodes_count) { 2 }
+ let(:tree_depth) { 3 }
- let(:backfilled_namespace) { nil }
+ let(:backfilled_namespace) { nil }
- before do
- BackfillProjectNamespaces::TreeGenerator.new(namespaces, projects, [parent_group1, parent_group2], child_nodes_count, tree_depth).build_tree
- end
-
- describe '#up' do
- shared_examples 'back-fill project namespaces' do
- it 'back-fills all project namespaces' do
- start_id = ::Project.minimum(:id)
- end_id = ::Project.maximum(:id)
- projects_count = ::Project.count
- batches_count = (projects_count / described_class::SUB_BATCH_SIZE.to_f).ceil
- project_namespaces_count = ::Namespace.where(type: 'Project').count
- migration = described_class.new
-
- expect(projects_count).not_to eq(project_namespaces_count)
- expect(migration).to receive(:batch_insert_namespaces).exactly(batches_count).and_call_original
- expect(migration).to receive(:batch_update_projects).exactly(batches_count).and_call_original
- expect(migration).to receive(:batch_update_project_namespaces_traversal_ids).exactly(batches_count).and_call_original
-
- expect { migration.perform(start_id, end_id, nil, nil, nil, nil, nil, 'up') }.to change(Namespace.where(type: 'Project'), :count)
-
- expect(projects_count).to eq(::Namespace.where(type: 'Project').count)
- check_projects_in_sync_with(Namespace.where(type: 'Project'))
- end
-
- context 'when passing specific group as parameter' do
- let(:backfilled_namespace) { parent_group1 }
-
- it 'back-fills project namespaces for the specified group hierarchy' do
- backfilled_namespace_projects = base_ancestor(backfilled_namespace).first.all_projects
- start_id = backfilled_namespace_projects.minimum(:id)
- end_id = backfilled_namespace_projects.maximum(:id)
- group_projects_count = backfilled_namespace_projects.count
- batches_count = (group_projects_count / described_class::SUB_BATCH_SIZE.to_f).ceil
- project_namespaces_in_hierarchy = project_namespaces_in_hierarchy(base_ancestor(backfilled_namespace))
+ before do
+ BackfillProjectNamespaces::TreeGenerator.new(namespaces, projects, [parent_group1, parent_group2], child_nodes_count, tree_depth).build_tree
+ end
+ describe '#up' do
+ shared_examples 'back-fill project namespaces' do
+ it 'back-fills all project namespaces' do
+ start_id = ::Project.minimum(:id)
+ end_id = ::Project.maximum(:id)
+ projects_count = ::Project.count
+ batches_count = (projects_count / described_class::SUB_BATCH_SIZE.to_f).ceil
+ project_namespaces_count = ::Namespace.where(type: 'Project').count
migration = described_class.new
- expect(project_namespaces_in_hierarchy.count).to eq(0)
+ expect(projects_count).not_to eq(project_namespaces_count)
expect(migration).to receive(:batch_insert_namespaces).exactly(batches_count).and_call_original
expect(migration).to receive(:batch_update_projects).exactly(batches_count).and_call_original
expect(migration).to receive(:batch_update_project_namespaces_traversal_ids).exactly(batches_count).and_call_original
- expect(group_projects_count).to eq(14)
- expect(project_namespaces_in_hierarchy.count).to eq(0)
-
- migration.perform(start_id, end_id, nil, nil, nil, nil, backfilled_namespace.id, 'up')
+ expect { migration.perform(start_id, end_id, nil, nil, nil, nil, nil, 'up') }.to change(Namespace.where(type: 'Project'), :count)
- expect(project_namespaces_in_hierarchy.count).to eq(14)
- check_projects_in_sync_with(project_namespaces_in_hierarchy)
+ expect(projects_count).to eq(::Namespace.where(type: 'Project').count)
+ check_projects_in_sync_with(Namespace.where(type: 'Project'))
end
- end
- context 'when projects already have project namespaces' do
- before do
- hierarchy1_projects = base_ancestor(parent_group1).first.all_projects
- start_id = hierarchy1_projects.minimum(:id)
- end_id = hierarchy1_projects.maximum(:id)
+ context 'when passing specific group as parameter' do
+ let(:backfilled_namespace) { parent_group1 }
- described_class.new.perform(start_id, end_id, nil, nil, nil, nil, parent_group1.id, 'up')
- end
+ it 'back-fills project namespaces for the specified group hierarchy' do
+ backfilled_namespace_projects = base_ancestor(backfilled_namespace).first.all_projects
+ start_id = backfilled_namespace_projects.minimum(:id)
+ end_id = backfilled_namespace_projects.maximum(:id)
+ group_projects_count = backfilled_namespace_projects.count
+ batches_count = (group_projects_count / described_class::SUB_BATCH_SIZE.to_f).ceil
+ project_namespaces_in_hierarchy = project_namespaces_in_hierarchy(base_ancestor(backfilled_namespace))
- it 'does not duplicate project namespaces' do
- # check there are already some project namespaces but not for all
- projects_count = ::Project.count
- start_id = ::Project.minimum(:id)
- end_id = ::Project.maximum(:id)
- batches_count = (projects_count / described_class::SUB_BATCH_SIZE.to_f).ceil
- project_namespaces = ::Namespace.where(type: 'Project')
- migration = described_class.new
+ migration = described_class.new
- expect(project_namespaces_in_hierarchy(base_ancestor(parent_group1)).count).to be >= 14
- expect(project_namespaces_in_hierarchy(base_ancestor(parent_group2)).count).to eq(0)
- expect(projects_count).not_to eq(project_namespaces.count)
+ expect(project_namespaces_in_hierarchy.count).to eq(0)
+ expect(migration).to receive(:batch_insert_namespaces).exactly(batches_count).and_call_original
+ expect(migration).to receive(:batch_update_projects).exactly(batches_count).and_call_original
+ expect(migration).to receive(:batch_update_project_namespaces_traversal_ids).exactly(batches_count).and_call_original
- # run migration again to test we do not generate extra project namespaces
- expect(migration).to receive(:batch_insert_namespaces).exactly(batches_count).and_call_original
- expect(migration).to receive(:batch_update_projects).exactly(batches_count).and_call_original
- expect(migration).to receive(:batch_update_project_namespaces_traversal_ids).exactly(batches_count).and_call_original
+ expect(group_projects_count).to eq(14)
+ expect(project_namespaces_in_hierarchy.count).to eq(0)
- expect { migration.perform(start_id, end_id, nil, nil, nil, nil, nil, 'up') }.to change(project_namespaces, :count).by(14)
+ migration.perform(start_id, end_id, nil, nil, nil, nil, backfilled_namespace.id, 'up')
- expect(projects_count).to eq(project_namespaces.count)
+ expect(project_namespaces_in_hierarchy.count).to eq(14)
+ check_projects_in_sync_with(project_namespaces_in_hierarchy)
+ end
end
- end
- end
- it 'checks no project namespaces exist in the defined hierarchies' do
- hierarchy1_project_namespaces = project_namespaces_in_hierarchy(base_ancestor(parent_group1))
- hierarchy2_project_namespaces = project_namespaces_in_hierarchy(base_ancestor(parent_group2))
- hierarchy1_projects_count = base_ancestor(parent_group1).first.all_projects.count
- hierarchy2_projects_count = base_ancestor(parent_group2).first.all_projects.count
+ context 'when projects already have project namespaces' do
+ before do
+ hierarchy1_projects = base_ancestor(parent_group1).first.all_projects
+ start_id = hierarchy1_projects.minimum(:id)
+ end_id = hierarchy1_projects.maximum(:id)
+
+ described_class.new.perform(start_id, end_id, nil, nil, nil, nil, parent_group1.id, 'up')
+ end
+
+ it 'does not duplicate project namespaces' do
+ # check there are already some project namespaces but not for all
+ projects_count = ::Project.count
+ start_id = ::Project.minimum(:id)
+ end_id = ::Project.maximum(:id)
+ batches_count = (projects_count / described_class::SUB_BATCH_SIZE.to_f).ceil
+ project_namespaces = ::Namespace.where(type: 'Project')
+ migration = described_class.new
+
+ expect(project_namespaces_in_hierarchy(base_ancestor(parent_group1)).count).to be >= 14
+ expect(project_namespaces_in_hierarchy(base_ancestor(parent_group2)).count).to eq(0)
+ expect(projects_count).not_to eq(project_namespaces.count)
+
+ # run migration again to test we do not generate extra project namespaces
+ expect(migration).to receive(:batch_insert_namespaces).exactly(batches_count).and_call_original
+ expect(migration).to receive(:batch_update_projects).exactly(batches_count).and_call_original
+ expect(migration).to receive(:batch_update_project_namespaces_traversal_ids).exactly(batches_count).and_call_original
+
+ expect { migration.perform(start_id, end_id, nil, nil, nil, nil, nil, 'up') }.to change(project_namespaces, :count).by(14)
+
+ expect(projects_count).to eq(project_namespaces.count)
+ end
+ end
+ end
- expect(hierarchy1_project_namespaces).to be_empty
- expect(hierarchy2_project_namespaces).to be_empty
- expect(hierarchy1_projects_count).to eq(14)
- expect(hierarchy2_projects_count).to eq(14)
- end
+ it 'checks no project namespaces exist in the defined hierarchies' do
+ hierarchy1_project_namespaces = project_namespaces_in_hierarchy(base_ancestor(parent_group1))
+ hierarchy2_project_namespaces = project_namespaces_in_hierarchy(base_ancestor(parent_group2))
+ hierarchy1_projects_count = base_ancestor(parent_group1).first.all_projects.count
+ hierarchy2_projects_count = base_ancestor(parent_group2).first.all_projects.count
- context 'back-fill project namespaces in a single batch' do
- it_behaves_like 'back-fill project namespaces'
- end
+ expect(hierarchy1_project_namespaces).to be_empty
+ expect(hierarchy2_project_namespaces).to be_empty
+ expect(hierarchy1_projects_count).to eq(14)
+ expect(hierarchy2_projects_count).to eq(14)
+ end
- context 'back-fill project namespaces in batches' do
- before do
- stub_const("#{described_class.name}::SUB_BATCH_SIZE", 2)
+ context 'back-fill project namespaces in a single batch' do
+ it_behaves_like 'back-fill project namespaces'
end
- it_behaves_like 'back-fill project namespaces'
- end
- end
+ context 'back-fill project namespaces in batches' do
+ before do
+ stub_const("#{described_class.name}::SUB_BATCH_SIZE", 2)
+ end
- describe '#down' do
- before do
- start_id = ::Project.minimum(:id)
- end_id = ::Project.maximum(:id)
- # back-fill first
- described_class.new.perform(start_id, end_id, nil, nil, nil, nil, nil, 'up')
+ it_behaves_like 'back-fill project namespaces'
+ end
end
- shared_examples 'cleanup project namespaces' do
- it 'removes project namespaces' do
- projects_count = ::Project.count
+ describe '#down' do
+ before do
start_id = ::Project.minimum(:id)
end_id = ::Project.maximum(:id)
- migration = described_class.new
- batches_count = (projects_count / described_class::SUB_BATCH_SIZE.to_f).ceil
+ # back-fill first
+ described_class.new.perform(start_id, end_id, nil, nil, nil, nil, nil, 'up')
+ end
- expect(projects_count).to be > 0
- expect(projects_count).to eq(::Namespace.where(type: 'Project').count)
+ shared_examples 'cleanup project namespaces' do
+ it 'removes project namespaces' do
+ projects_count = ::Project.count
+ start_id = ::Project.minimum(:id)
+ end_id = ::Project.maximum(:id)
+ migration = described_class.new
+ batches_count = (projects_count / described_class::SUB_BATCH_SIZE.to_f).ceil
- expect(migration).to receive(:nullify_project_namespaces_in_projects).exactly(batches_count).and_call_original
- expect(migration).to receive(:delete_project_namespace_records).exactly(batches_count).and_call_original
+ expect(projects_count).to be > 0
+ expect(projects_count).to eq(::Namespace.where(type: 'Project').count)
- migration.perform(start_id, end_id, nil, nil, nil, nil, nil, 'down')
+ expect(migration).to receive(:nullify_project_namespaces_in_projects).exactly(batches_count).and_call_original
+ expect(migration).to receive(:delete_project_namespace_records).exactly(batches_count).and_call_original
- expect(::Project.count).to be > 0
- expect(::Namespace.where(type: 'Project').count).to eq(0)
- end
+ migration.perform(start_id, end_id, nil, nil, nil, nil, nil, 'down')
+
+ expect(::Project.count).to be > 0
+ expect(::Namespace.where(type: 'Project').count).to eq(0)
+ end
- context 'when passing specific group as parameter' do
- let(:backfilled_namespace) { parent_group1 }
+ context 'when passing specific group as parameter' do
+ let(:backfilled_namespace) { parent_group1 }
- it 'removes project namespaces only for the specific group hierarchy' do
- backfilled_namespace_projects = base_ancestor(backfilled_namespace).first.all_projects
- start_id = backfilled_namespace_projects.minimum(:id)
- end_id = backfilled_namespace_projects.maximum(:id)
- group_projects_count = backfilled_namespace_projects.count
- batches_count = (group_projects_count / described_class::SUB_BATCH_SIZE.to_f).ceil
- project_namespaces_in_hierarchy = project_namespaces_in_hierarchy(base_ancestor(backfilled_namespace))
- migration = described_class.new
+ it 'removes project namespaces only for the specific group hierarchy' do
+ backfilled_namespace_projects = base_ancestor(backfilled_namespace).first.all_projects
+ start_id = backfilled_namespace_projects.minimum(:id)
+ end_id = backfilled_namespace_projects.maximum(:id)
+ group_projects_count = backfilled_namespace_projects.count
+ batches_count = (group_projects_count / described_class::SUB_BATCH_SIZE.to_f).ceil
+ project_namespaces_in_hierarchy = project_namespaces_in_hierarchy(base_ancestor(backfilled_namespace))
+ migration = described_class.new
- expect(project_namespaces_in_hierarchy.count).to eq(14)
- expect(migration).to receive(:nullify_project_namespaces_in_projects).exactly(batches_count).and_call_original
- expect(migration).to receive(:delete_project_namespace_records).exactly(batches_count).and_call_original
+ expect(project_namespaces_in_hierarchy.count).to eq(14)
+ expect(migration).to receive(:nullify_project_namespaces_in_projects).exactly(batches_count).and_call_original
+ expect(migration).to receive(:delete_project_namespace_records).exactly(batches_count).and_call_original
- migration.perform(start_id, end_id, nil, nil, nil, nil, backfilled_namespace.id, 'down')
+ migration.perform(start_id, end_id, nil, nil, nil, nil, backfilled_namespace.id, 'down')
- expect(::Namespace.where(type: 'Project').count).to be > 0
- expect(project_namespaces_in_hierarchy.count).to eq(0)
+ expect(::Namespace.where(type: 'Project').count).to be > 0
+ expect(project_namespaces_in_hierarchy.count).to eq(0)
+ end
end
end
- end
- context 'cleanup project namespaces in a single batch' do
- it_behaves_like 'cleanup project namespaces'
- end
-
- context 'cleanup project namespaces in batches' do
- before do
- stub_const("#{described_class.name}::SUB_BATCH_SIZE", 2)
+ context 'cleanup project namespaces in a single batch' do
+ it_behaves_like 'cleanup project namespaces'
end
- it_behaves_like 'cleanup project namespaces'
+ context 'cleanup project namespaces in batches' do
+ before do
+ stub_const("#{described_class.name}::SUB_BATCH_SIZE", 2)
+ end
+
+ it_behaves_like 'cleanup project namespaces'
+ end
end
end
end
+ it_behaves_like 'backfills project namespaces'
+
+ context 'when namespaces.id is bigint' do
+ before do
+ namespaces.connection.execute("ALTER TABLE namespaces ALTER COLUMN id TYPE bigint")
+ end
+
+ it_behaves_like 'backfills project namespaces'
+ end
+
def base_ancestor(ancestor)
::Namespace.where(id: ancestor.id)
end
@@ -209,7 +221,7 @@ RSpec.describe Gitlab::BackgroundMigration::ProjectNamespaces::BackfillProjectNa
def check_projects_in_sync_with(namespaces)
project_namespaces_attrs = namespaces.order(:id).pluck(:id, :name, :path, :parent_id, :visibility_level, :shared_runners_enabled)
corresponding_projects_attrs = Project.where(project_namespace_id: project_namespaces_attrs.map(&:first))
- .order(:project_namespace_id).pluck(:project_namespace_id, :name, :path, :namespace_id, :visibility_level, :shared_runners_enabled)
+ .order(:project_namespace_id).pluck(:project_namespace_id, :name, :path, :namespace_id, :visibility_level, :shared_runners_enabled)
expect(project_namespaces_attrs).to eq(corresponding_projects_attrs)
end
diff --git a/spec/lib/gitlab/background_migration/recalculate_vulnerabilities_occurrences_uuid_spec.rb b/spec/lib/gitlab/background_migration/recalculate_vulnerabilities_occurrences_uuid_spec.rb
index 8d71b117107..a609227be05 100644
--- a/spec/lib/gitlab/background_migration/recalculate_vulnerabilities_occurrences_uuid_spec.rb
+++ b/spec/lib/gitlab/background_migration/recalculate_vulnerabilities_occurrences_uuid_spec.rb
@@ -20,7 +20,7 @@ def create_background_migration_job(ids, status)
)
end
-RSpec.describe Gitlab::BackgroundMigration::RecalculateVulnerabilitiesOccurrencesUuid, schema: 20211124132705 do
+RSpec.describe Gitlab::BackgroundMigration::RecalculateVulnerabilitiesOccurrencesUuid, :suppress_gitlab_schemas_validate_connection, schema: 20211124132705 do
let(:background_migration_jobs) { table(:background_migration_jobs) }
let(:pending_jobs) { background_migration_jobs.where(status: Gitlab::Database::BackgroundMigrationJob.statuses['pending']) }
let(:succeeded_jobs) { background_migration_jobs.where(status: Gitlab::Database::BackgroundMigrationJob.statuses['succeeded']) }
diff --git a/spec/lib/gitlab/background_migration/remove_all_trace_expiration_dates_spec.rb b/spec/lib/gitlab/background_migration/remove_all_trace_expiration_dates_spec.rb
index 8cdcec9621c..eabc012f98b 100644
--- a/spec/lib/gitlab/background_migration/remove_all_trace_expiration_dates_spec.rb
+++ b/spec/lib/gitlab/background_migration/remove_all_trace_expiration_dates_spec.rb
@@ -2,7 +2,8 @@
require 'spec_helper'
-RSpec.describe Gitlab::BackgroundMigration::RemoveAllTraceExpirationDates, :migration, schema: 20220131000001 do
+RSpec.describe Gitlab::BackgroundMigration::RemoveAllTraceExpirationDates, :migration,
+ :suppress_gitlab_schemas_validate_connection, schema: 20220131000001 do
subject(:perform) { migration.perform(1, 99) }
let(:migration) { described_class.new }
diff --git a/spec/lib/gitlab/background_migration/remove_occurrence_pipelines_and_duplicate_vulnerabilities_findings_spec.rb b/spec/lib/gitlab/background_migration/remove_occurrence_pipelines_and_duplicate_vulnerabilities_findings_spec.rb
index 07cff32304e..33ad74fbee8 100644
--- a/spec/lib/gitlab/background_migration/remove_occurrence_pipelines_and_duplicate_vulnerabilities_findings_spec.rb
+++ b/spec/lib/gitlab/background_migration/remove_occurrence_pipelines_and_duplicate_vulnerabilities_findings_spec.rb
@@ -1,7 +1,8 @@
# frozen_string_literal: true
require 'spec_helper'
-RSpec.describe Gitlab::BackgroundMigration::RemoveOccurrencePipelinesAndDuplicateVulnerabilitiesFindings, :migration, schema: 20220326161803 do
+RSpec.describe Gitlab::BackgroundMigration::RemoveOccurrencePipelinesAndDuplicateVulnerabilitiesFindings, :migration,
+ :suppress_gitlab_schemas_validate_connection, schema: 20220326161803 do
let(:namespace) { table(:namespaces).create!(name: 'user', path: 'user') }
let(:users) { table(:users) }
let(:user) { create_user! }
diff --git a/spec/lib/gitlab/background_migration/set_legacy_open_source_license_available_for_non_public_projects_spec.rb b/spec/lib/gitlab/background_migration/set_legacy_open_source_license_available_for_non_public_projects_spec.rb
index 035ea6eadcf..e9f73672144 100644
--- a/spec/lib/gitlab/background_migration/set_legacy_open_source_license_available_for_non_public_projects_spec.rb
+++ b/spec/lib/gitlab/background_migration/set_legacy_open_source_license_available_for_non_public_projects_spec.rb
@@ -4,14 +4,14 @@ require 'spec_helper'
RSpec.describe Gitlab::BackgroundMigration::SetLegacyOpenSourceLicenseAvailableForNonPublicProjects,
:migration,
- schema: 20220520040416 do
+ schema: 20220722110026 do
let(:namespaces_table) { table(:namespaces) }
let(:projects_table) { table(:projects) }
let(:project_settings_table) { table(:project_settings) }
subject(:perform_migration) do
- described_class.new(start_id: 1,
- end_id: 30,
+ described_class.new(start_id: projects_table.minimum(:id),
+ end_id: projects_table.maximum(:id),
batch_table: :projects,
batch_column: :id,
sub_batch_size: 2,
@@ -20,35 +20,34 @@ RSpec.describe Gitlab::BackgroundMigration::SetLegacyOpenSourceLicenseAvailableF
.perform
end
- let(:queries) { ActiveRecord::QueryRecorder.new { perform_migration } }
+ it 'sets `legacy_open_source_license_available` attribute to false for non-public projects', :aggregate_failures do
+ private_project = create_legacy_license_project('private-project', visibility_level: 0)
+ internal_project = create_legacy_license_project('internal-project', visibility_level: 10)
+ public_project = create_legacy_license_project('public-project', visibility_level: 20)
- before do
- namespaces_table.create!(id: 1, name: 'namespace', path: 'namespace-path-1')
- namespaces_table.create!(id: 2, name: 'namespace', path: 'namespace-path-2', type: 'Project')
- namespaces_table.create!(id: 3, name: 'namespace', path: 'namespace-path-3', type: 'Project')
- namespaces_table.create!(id: 4, name: 'namespace', path: 'namespace-path-4', type: 'Project')
+ queries = ActiveRecord::QueryRecorder.new { perform_migration }
- projects_table
- .create!(id: 11, name: 'proj-1', path: 'path-1', namespace_id: 1, project_namespace_id: 2, visibility_level: 0)
- projects_table
- .create!(id: 12, name: 'proj-2', path: 'path-2', namespace_id: 1, project_namespace_id: 3, visibility_level: 10)
- projects_table
- .create!(id: 13, name: 'proj-3', path: 'path-3', namespace_id: 1, project_namespace_id: 4, visibility_level: 20)
+ expect(queries.count).to eq(5)
- project_settings_table.create!(project_id: 11, legacy_open_source_license_available: true)
- project_settings_table.create!(project_id: 12, legacy_open_source_license_available: true)
- project_settings_table.create!(project_id: 13, legacy_open_source_license_available: true)
+ expect(migrated_attribute(private_project)).to be_falsey
+ expect(migrated_attribute(internal_project)).to be_falsey
+ expect(migrated_attribute(public_project)).to be_truthy
end
- it 'sets `legacy_open_source_license_available` attribute to false for non-public projects', :aggregate_failures do
- expect(queries.count).to eq(3)
-
- expect(migrated_attribute(11)).to be_falsey
- expect(migrated_attribute(12)).to be_falsey
- expect(migrated_attribute(13)).to be_truthy
+ def create_legacy_license_project(path, visibility_level:)
+ namespace = namespaces_table.create!(name: "namespace-#{path}", path: "namespace-#{path}")
+ project_namespace = namespaces_table.create!(name: "project-namespace-#{path}", path: path, type: 'Project')
+ project = projects_table.create!(name: path,
+ path: path,
+ namespace_id: namespace.id,
+ project_namespace_id: project_namespace.id,
+ visibility_level: visibility_level)
+ project_settings_table.create!(project_id: project.id, legacy_open_source_license_available: true)
+
+ project
end
- def migrated_attribute(project_id)
- project_settings_table.find(project_id).legacy_open_source_license_available
+ def migrated_attribute(project)
+ project_settings_table.find(project.id).legacy_open_source_license_available
end
end
diff --git a/spec/lib/gitlab/background_migration/update_jira_tracker_data_deployment_type_based_on_url_spec.rb b/spec/lib/gitlab/background_migration/update_jira_tracker_data_deployment_type_based_on_url_spec.rb
index b96d3f7f0b5..c090c1df424 100644
--- a/spec/lib/gitlab/background_migration/update_jira_tracker_data_deployment_type_based_on_url_spec.rb
+++ b/spec/lib/gitlab/background_migration/update_jira_tracker_data_deployment_type_based_on_url_spec.rb
@@ -2,10 +2,26 @@
require 'spec_helper'
-RSpec.describe Gitlab::BackgroundMigration::UpdateJiraTrackerDataDeploymentTypeBasedOnUrl, schema: 20210421163509 do
- let(:services_table) { table(:services) }
- let(:service_jira_cloud) { services_table.create!(id: 1, type: 'JiraService') }
- let(:service_jira_server) { services_table.create!(id: 2, type: 'JiraService') }
+RSpec.describe Gitlab::BackgroundMigration::UpdateJiraTrackerDataDeploymentTypeBasedOnUrl do
+ let(:integrations_table) { table(:integrations) }
+ let(:service_jira_cloud) { integrations_table.create!(id: 1, type_new: 'JiraService') }
+ let(:service_jira_server) { integrations_table.create!(id: 2, type_new: 'JiraService') }
+ let(:service_jira_unknown) { integrations_table.create!(id: 3, type_new: 'JiraService') }
+
+ let(:table_name) { :jira_tracker_data }
+ let(:batch_column) { :id }
+ let(:sub_batch_size) { 1 }
+ let(:pause_ms) { 0 }
+ let(:migration) do
+ described_class.new(start_id: 1, end_id: 10,
+ batch_table: table_name, batch_column: batch_column,
+ sub_batch_size: sub_batch_size, pause_ms: pause_ms,
+ connection: ApplicationRecord.connection)
+ end
+
+ subject(:perform_migration) do
+ migration.perform
+ end
before do
jira_tracker_data = Class.new(ApplicationRecord) do
@@ -27,18 +43,21 @@ RSpec.describe Gitlab::BackgroundMigration::UpdateJiraTrackerDataDeploymentTypeB
end
stub_const('JiraTrackerData', jira_tracker_data)
- end
- let!(:tracker_data_cloud) { JiraTrackerData.create!(id: 1, service_id: service_jira_cloud.id, url: "https://test-domain.atlassian.net", deployment_type: 0) }
- let!(:tracker_data_server) { JiraTrackerData.create!(id: 2, service_id: service_jira_server.id, url: "http://totally-not-jira-server.company.org", deployment_type: 0) }
+ stub_const('UNKNOWN', 0)
+ stub_const('SERVER', 1)
+ stub_const('CLOUD', 2)
+ end
- subject { described_class.new.perform(tracker_data_cloud.id, tracker_data_server.id) }
+ let!(:tracker_data_cloud) { JiraTrackerData.create!(id: 1, integration_id: service_jira_cloud.id, url: "https://test-domain.atlassian.net", deployment_type: UNKNOWN) }
+ let!(:tracker_data_server) { JiraTrackerData.create!(id: 2, integration_id: service_jira_server.id, url: "http://totally-not-jira-server.company.org", deployment_type: UNKNOWN) }
+ let!(:tracker_data_unknown) { JiraTrackerData.create!(id: 3, integration_id: service_jira_unknown.id, url: "", deployment_type: UNKNOWN) }
it "changes unknown deployment_types based on URL" do
- expect(JiraTrackerData.pluck(:deployment_type)).to eq([0, 0])
+ expect(JiraTrackerData.pluck(:deployment_type)).to match_array([UNKNOWN, UNKNOWN, UNKNOWN])
- subject
+ perform_migration
- expect(JiraTrackerData.pluck(:deployment_type)).to eq([2, 1])
+ expect(JiraTrackerData.order(:id).pluck(:deployment_type)).to match_array([CLOUD, SERVER, UNKNOWN])
end
end
diff --git a/spec/lib/gitlab/background_task_spec.rb b/spec/lib/gitlab/background_task_spec.rb
new file mode 100644
index 00000000000..102556b6b2f
--- /dev/null
+++ b/spec/lib/gitlab/background_task_spec.rb
@@ -0,0 +1,209 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+
+# We need to capture task state from a closure, which requires instance variables.
+# rubocop: disable RSpec/InstanceVariable
+RSpec.describe Gitlab::BackgroundTask do
+ let(:options) { {} }
+ let(:task) do
+ proc do
+ @task_run = true
+ @task_thread = Thread.current
+ end
+ end
+
+ subject(:background_task) { described_class.new(task, **options) }
+
+ def expect_condition
+ Timeout.timeout(3) do
+ sleep 0.1 until yield
+ end
+ end
+
+ context 'when stopped' do
+ it 'is not running' do
+ expect(background_task).not_to be_running
+ end
+
+ describe '#start' do
+ it 'runs the given task on a background thread' do
+ test_thread = Thread.current
+
+ background_task.start
+
+ expect_condition { @task_run == true }
+ expect_condition { @task_thread != test_thread }
+ expect(background_task).to be_running
+ end
+
+ it 'returns self' do
+ expect(background_task.start).to be(background_task)
+ end
+
+ context 'when installing exit handler' do
+ it 'stops a running background task' do
+ expect(background_task).to receive(:at_exit).and_yield
+
+ background_task.start
+
+ expect(background_task).not_to be_running
+ end
+ end
+
+ context 'when task responds to start' do
+ let(:task_class) do
+ Struct.new(:started, :start_retval, :run) do
+ def start
+ self.started = true
+ self.start_retval
+ end
+
+ def call
+ self.run = true
+ end
+ end
+ end
+
+ let(:task) { task_class.new }
+
+ it 'calls start' do
+ background_task.start
+
+ expect_condition { task.started == true }
+ end
+
+ context 'when start returns true' do
+ it 'runs the task' do
+ task.start_retval = true
+
+ background_task.start
+
+ expect_condition { task.run == true }
+ end
+ end
+
+ context 'when start returns false' do
+ it 'does not run the task' do
+ task.start_retval = false
+
+ background_task.start
+
+ expect_condition { task.run.nil? }
+ end
+ end
+ end
+
+ context 'when synchronous is set to true' do
+ let(:options) { { synchronous: true } }
+
+ it 'calls join on the thread' do
+ # Thread has to be run in a block, expect_next_instance_of does not support this.
+ allow_any_instance_of(Thread).to receive(:join) # rubocop:disable RSpec/AnyInstanceOf
+
+ background_task.start
+
+ expect_condition { @task_run == true }
+ expect(@task_thread).to have_received(:join)
+ end
+ end
+ end
+
+ describe '#stop' do
+ it 'is a no-op' do
+ expect { background_task.stop }.not_to change { subject.running? }
+ expect_condition { @task_run.nil? }
+ end
+ end
+ end
+
+ context 'when running' do
+ before do
+ background_task.start
+ end
+
+ describe '#start' do
+ it 'raises an error' do
+ expect { background_task.start }.to raise_error(described_class::AlreadyStartedError)
+ end
+ end
+
+ describe '#stop' do
+ it 'stops running' do
+ expect { background_task.stop }.to change { subject.running? }.from(true).to(false)
+ end
+
+ context 'when task responds to stop' do
+ let(:task_class) do
+ Struct.new(:stopped, :call) do
+ def stop
+ self.stopped = true
+ end
+ end
+ end
+
+ let(:task) { task_class.new }
+
+ it 'calls stop' do
+ background_task.stop
+
+ expect_condition { task.stopped == true }
+ end
+ end
+
+ context 'when task stop raises an error' do
+ let(:error) { RuntimeError.new('task error') }
+ let(:options) { { name: 'test_background_task' } }
+
+ let(:task_class) do
+ Struct.new(:call, :error, keyword_init: true) do
+ def stop
+ raise error
+ end
+ end
+ end
+
+ let(:task) { task_class.new(error: error) }
+
+ it 'stops gracefully' do
+ expect { background_task.stop }.not_to raise_error
+ expect(background_task).not_to be_running
+ end
+
+ it 'reports the error' do
+ expect(Gitlab::ErrorTracking).to receive(:track_exception).with(
+ error, { extra: { reported_by: 'test_background_task' } }
+ )
+
+ background_task.stop
+ end
+ end
+ end
+
+ context 'when task run raises exception' do
+ let(:error) { RuntimeError.new('task error') }
+ let(:options) { { name: 'test_background_task' } }
+ let(:task) do
+ proc do
+ @task_run = true
+ raise error
+ end
+ end
+
+ it 'stops gracefully' do
+ expect_condition { @task_run == true }
+ expect { background_task.stop }.not_to raise_error
+ expect(background_task).not_to be_running
+ end
+
+ it 'reports the error' do
+ expect(Gitlab::ErrorTracking).to receive(:track_exception).with(
+ error, { extra: { reported_by: 'test_background_task' } }
+ )
+
+ background_task.stop
+ end
+ end
+ end
+end
+# rubocop: enable RSpec/InstanceVariable
diff --git a/spec/lib/gitlab/bare_repository_import/repository_spec.rb b/spec/lib/gitlab/bare_repository_import/repository_spec.rb
index d29447ee376..becfdced5fb 100644
--- a/spec/lib/gitlab/bare_repository_import/repository_spec.rb
+++ b/spec/lib/gitlab/bare_repository_import/repository_spec.rb
@@ -54,16 +54,16 @@ RSpec.describe ::Gitlab::BareRepositoryImport::Repository do
end
context 'hashed storage' do
- let(:hash) { '6b86b273ff34fce19d6b804eff5a3f5747ada4eaa22f1d49c01e52ddb7875b4b' }
let(:hashed_path) { "@hashed/6b/86/6b86b273ff34fce19d6b804eff5a3f5747ada4eaa22f1d49c01e52ddb7875b4b" }
let(:root_path) { TestEnv.repos_path }
let(:repo_path) { File.join(root_path, "#{hashed_path}.git") }
let(:wiki_path) { File.join(root_path, "#{hashed_path}.wiki.git") }
let(:raw_repository) { Gitlab::Git::Repository.new('default', "#{hashed_path}.git", nil, nil) }
+ let(:full_path) { 'to/repo' }
before do
raw_repository.create_repository
- raw_repository.set_full_path(full_path: 'to/repo')
+ raw_repository.set_full_path(full_path: full_path) if full_path
end
after do
@@ -95,16 +95,17 @@ RSpec.describe ::Gitlab::BareRepositoryImport::Repository do
expect(subject).not_to be_processable
end
- it 'returns false when group and project name are missing' do
- repository = Rugged::Repository.new(repo_path)
- repository.config.delete('gitlab.fullpath')
-
- expect(subject).not_to be_processable
- end
-
it 'returns true when group path and project name are present' do
expect(subject).to be_processable
end
+
+ context 'group and project name are missing' do
+ let(:full_path) { nil }
+
+ it 'returns false' do
+ expect(subject).not_to be_processable
+ end
+ end
end
describe '#project_full_path' do
diff --git a/spec/lib/gitlab/batch_pop_queueing_spec.rb b/spec/lib/gitlab/batch_pop_queueing_spec.rb
index 41efc5417e4..5af78ddabe7 100644
--- a/spec/lib/gitlab/batch_pop_queueing_spec.rb
+++ b/spec/lib/gitlab/batch_pop_queueing_spec.rb
@@ -92,7 +92,7 @@ RSpec.describe Gitlab::BatchPopQueueing do
context 'when the queue key does not exist in Redis' do
before do
- allow(queue).to receive(:enqueue) { }
+ allow(queue).to receive(:enqueue) {}
end
it 'yields empty array' do
diff --git a/spec/lib/gitlab/chat_name_token_spec.rb b/spec/lib/gitlab/chat_name_token_spec.rb
index 906c02d54db..8d5702a6b27 100644
--- a/spec/lib/gitlab/chat_name_token_spec.rb
+++ b/spec/lib/gitlab/chat_name_token_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
RSpec.describe Gitlab::ChatNameToken do
context 'when using unknown token' do
- let(:token) { }
+ let(:token) {}
subject { described_class.new(token).get }
diff --git a/spec/lib/gitlab/ci/ansi2html_spec.rb b/spec/lib/gitlab/ci/ansi2html_spec.rb
index 27c2b005a93..30359a7170f 100644
--- a/spec/lib/gitlab/ci/ansi2html_spec.rb
+++ b/spec/lib/gitlab/ci/ansi2html_spec.rb
@@ -210,8 +210,8 @@ RSpec.describe Gitlab::Ci::Ansi2html do
let(:section_start_time) { Time.new(2017, 9, 20).utc }
let(:section_duration) { 3.seconds }
let(:section_end_time) { section_start_time + section_duration }
- let(:section_start) { "section_start:#{section_start_time.to_i}:#{section_name}\r\033[0K"}
- let(:section_end) { "section_end:#{section_end_time.to_i}:#{section_name}\r\033[0K"}
+ let(:section_start) { "section_start:#{section_start_time.to_i}:#{section_name}\r\033[0K" }
+ let(:section_end) { "section_end:#{section_end_time.to_i}:#{section_name}\r\033[0K" }
let(:section_start_html) do
'<div class="section-start"' \
" data-timestamp=\"#{section_start_time.to_i}\" data-section=\"#{class_name(section_name)}\"" \
@@ -258,13 +258,13 @@ RSpec.describe Gitlab::Ci::Ansi2html do
it_behaves_like 'a legit section'
context 'section name includes $' do
- let(:section_name) { 'my_$ection'}
+ let(:section_name) { 'my_$ection' }
it_behaves_like 'forbidden char in section_name'
end
context 'section name includes <' do
- let(:section_name) { '<a_tag>'}
+ let(:section_name) { '<a_tag>' }
it_behaves_like 'forbidden char in section_name'
end
diff --git a/spec/lib/gitlab/ci/ansi2json_spec.rb b/spec/lib/gitlab/ci/ansi2json_spec.rb
index f9d23ff97bc..4b3b049176f 100644
--- a/spec/lib/gitlab/ci/ansi2json_spec.rb
+++ b/spec/lib/gitlab/ci/ansi2json_spec.rb
@@ -78,8 +78,8 @@ RSpec.describe Gitlab::Ci::Ansi2json do
let(:section_duration) { 63.seconds }
let(:section_start_time) { Time.new(2019, 9, 17).utc }
let(:section_end_time) { section_start_time + section_duration }
- let(:section_start) { "section_start:#{section_start_time.to_i}:#{section_name}\r\033[0K"}
- let(:section_end) { "section_end:#{section_end_time.to_i}:#{section_name}\r\033[0K"}
+ let(:section_start) { "section_start:#{section_start_time.to_i}:#{section_name}\r\033[0K" }
+ let(:section_end) { "section_end:#{section_end_time.to_i}:#{section_name}\r\033[0K" }
it 'marks the first line of the section as header' do
expect(convert_json("Hello#{section_start}world!")).to eq([
@@ -258,8 +258,8 @@ RSpec.describe Gitlab::Ci::Ansi2json do
let(:nested_section_duration) { 2.seconds }
let(:nested_section_start_time) { Time.new(2019, 9, 17).utc }
let(:nested_section_end_time) { nested_section_start_time + nested_section_duration }
- let(:nested_section_start) { "section_start:#{nested_section_start_time.to_i}:#{nested_section_name}\r\033[0K"}
- let(:nested_section_end) { "section_end:#{nested_section_end_time.to_i}:#{nested_section_name}\r\033[0K"}
+ let(:nested_section_start) { "section_start:#{nested_section_start_time.to_i}:#{nested_section_name}\r\033[0K" }
+ let(:nested_section_end) { "section_end:#{nested_section_end_time.to_i}:#{nested_section_name}\r\033[0K" }
it 'adds multiple sections to the lines inside the nested section' do
trace = "Hello#{section_start}foo#{nested_section_start}bar#{nested_section_end}baz#{section_end}world"
@@ -342,7 +342,7 @@ RSpec.describe Gitlab::Ci::Ansi2json do
end
context 'with section options' do
- let(:option_section_start) { "section_start:#{section_start_time.to_i}:#{section_name}[collapsed=true,unused_option=123]\r\033[0K"}
+ let(:option_section_start) { "section_start:#{section_start_time.to_i}:#{section_name}[collapsed=true,unused_option=123]\r\033[0K" }
it 'provides section options when set' do
trace = "#{option_section_start}hello#{section_end}"
@@ -463,8 +463,8 @@ RSpec.describe Gitlab::Ci::Ansi2json do
let(:section_duration) { 63.seconds }
let(:section_start_time) { Time.new(2019, 9, 17).utc }
let(:section_end_time) { section_start_time + section_duration }
- let(:section_start) { "section_start:#{section_start_time.to_i}:#{section_name}\r\033[0K"}
- let(:section_end) { "section_end:#{section_end_time.to_i}:#{section_name}\r\033[0K"}
+ let(:section_start) { "section_start:#{section_start_time.to_i}:#{section_name}\r\033[0K" }
+ let(:section_end) { "section_end:#{section_end_time.to_i}:#{section_name}\r\033[0K" }
context 'with split section body' do
let(:pre_text) { "#{section_start}this is a header\nand " }
diff --git a/spec/lib/gitlab/ci/artifacts/logger_spec.rb b/spec/lib/gitlab/ci/artifacts/logger_spec.rb
new file mode 100644
index 00000000000..7753cb0d25e
--- /dev/null
+++ b/spec/lib/gitlab/ci/artifacts/logger_spec.rb
@@ -0,0 +1,60 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Ci::Artifacts::Logger do
+ before do
+ Gitlab::ApplicationContext.push(feature_category: 'test', caller_id: 'caller')
+ end
+
+ describe '.log_created' do
+ it 'logs information about created artifact' do
+ artifact = create(:ci_job_artifact, :archive)
+
+ expect(Gitlab::AppLogger).to receive(:info).with(
+ hash_including(
+ message: 'Artifact created',
+ job_artifact_id: artifact.id,
+ size: artifact.size,
+ type: artifact.file_type,
+ build_id: artifact.job_id,
+ project_id: artifact.project_id,
+ 'correlation_id' => an_instance_of(String),
+ 'meta.feature_category' => 'test',
+ 'meta.caller_id' => 'caller'
+ )
+ )
+
+ described_class.log_created(artifact)
+ end
+ end
+
+ describe '.log_deleted' do
+ it 'logs information about deleted artifacts' do
+ artifact_1 = create(:ci_job_artifact, :archive, :expired)
+ artifact_2 = create(:ci_job_artifact, :archive)
+ artifacts = [artifact_1, artifact_2]
+ method = 'Foo#method'
+
+ artifacts.each do |artifact|
+ expect(Gitlab::AppLogger).to receive(:info).with(
+ hash_including(
+ message: 'Artifact deleted',
+ job_artifact_id: artifact.id,
+ expire_at: artifact.expire_at,
+ size: artifact.size,
+ type: artifact.file_type,
+ build_id: artifact.job_id,
+ project_id: artifact.project_id,
+ method: method,
+ 'correlation_id' => an_instance_of(String),
+ 'meta.feature_category' => 'test',
+ 'meta.caller_id' => 'caller'
+ )
+ )
+ end
+
+ described_class.log_deleted(artifacts, method)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/artifacts/metrics_spec.rb b/spec/lib/gitlab/ci/artifacts/metrics_spec.rb
index 0ce76285b03..39e440f09e1 100644
--- a/spec/lib/gitlab/ci/artifacts/metrics_spec.rb
+++ b/spec/lib/gitlab/ci/artifacts/metrics_spec.rb
@@ -5,6 +5,25 @@ require 'spec_helper'
RSpec.describe Gitlab::Ci::Artifacts::Metrics, :prometheus do
let(:metrics) { described_class.new }
+ describe '.build_completed_report_type_counter' do
+ context 'when incrementing by more than one' do
+ let(:sast_counter) { described_class.send(:build_completed_report_type_counter, :sast) }
+ let(:dast_counter) { described_class.send(:build_completed_report_type_counter, :dast) }
+
+ it 'increments a single counter' do
+ [dast_counter, sast_counter].each do |counter|
+ counter.increment(status: 'success')
+ counter.increment(status: 'success')
+ counter.increment(status: 'failed')
+
+ expect(counter.get(status: 'success')).to eq 2.0
+ expect(counter.get(status: 'failed')).to eq 1.0
+ expect(counter.values.count).to eq 2
+ end
+ end
+ end
+ end
+
describe '#increment_destroyed_artifacts' do
context 'when incrementing by more than one' do
let(:counter) { metrics.send(:destroyed_artifacts_counter) }
diff --git a/spec/lib/gitlab/ci/build/artifacts/adapters/zip_stream_spec.rb b/spec/lib/gitlab/ci/build/artifacts/adapters/zip_stream_spec.rb
new file mode 100644
index 00000000000..2c236ba3726
--- /dev/null
+++ b/spec/lib/gitlab/ci/build/artifacts/adapters/zip_stream_spec.rb
@@ -0,0 +1,86 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Ci::Build::Artifacts::Adapters::ZipStream do
+ let(:file_name) { 'single_file.zip' }
+ let(:fixture_path) { "lib/gitlab/ci/build/artifacts/adapters/zip_stream/#{file_name}" }
+ let(:stream) { File.open(expand_fixture_path(fixture_path), 'rb') }
+
+ describe '#initialize' do
+ it 'initializes when stream is passed' do
+ expect { described_class.new(stream) }.not_to raise_error
+ end
+
+ context 'when stream is not passed' do
+ let(:stream) { nil }
+
+ it 'raises an error' do
+ expect { described_class.new(stream) }.to raise_error(described_class::InvalidStreamError)
+ end
+ end
+ end
+
+ describe '#each_blob' do
+ let(:adapter) { described_class.new(stream) }
+
+ context 'when stream is a zip file' do
+ it 'iterates file content when zip file contains one file' do
+ expect { |b| adapter.each_blob(&b) }
+ .to yield_with_args("file 1 content\n")
+ end
+
+ context 'when zip file contains multiple files' do
+ let(:file_name) { 'multiple_files.zip' }
+
+ it 'iterates content of all files' do
+ expect { |b| adapter.each_blob(&b) }
+ .to yield_successive_args("file 1 content\n", "file 2 content\n")
+ end
+ end
+
+ context 'when zip file includes files in a directory' do
+ let(:file_name) { 'with_directory.zip' }
+
+ it 'iterates contents from files only' do
+ expect { |b| adapter.each_blob(&b) }
+ .to yield_successive_args("file 1 content\n", "file 2 content\n")
+ end
+ end
+
+ context 'when zip contains a file which decompresses beyond the size limit' do
+ let(:file_name) { '200_mb_decompressed.zip' }
+
+ it 'does not read the file' do
+ expect { |b| adapter.each_blob(&b) }.not_to yield_control
+ end
+ end
+
+ context 'when the zip contains too many files' do
+ let(:file_name) { '100_files.zip' }
+
+ it 'stops processing when the limit is reached' do
+ expect { |b| adapter.each_blob(&b) }
+ .to yield_control.exactly(described_class::MAX_FILES_PROCESSED).times
+ end
+ end
+
+ context 'when stream is a zipbomb' do
+ let(:file_name) { 'zipbomb.zip' }
+
+ it 'does not read the file' do
+ expect { |b| adapter.each_blob(&b) }.not_to yield_control
+ end
+ end
+ end
+
+ context 'when stream is not a zip file' do
+ let(:stream) { File.open(expand_fixture_path('junit/junit.xml.gz'), 'rb') }
+
+ it 'does not yield any data' do
+ expect { |b| adapter.each_blob(&b) }.not_to yield_control
+ expect { adapter.each_blob { |b| b } }.not_to raise_error
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/build/artifacts/metadata/entry_spec.rb b/spec/lib/gitlab/ci/build/artifacts/metadata/entry_spec.rb
index c8ace28108b..7b35c9ba483 100644
--- a/spec/lib/gitlab/ci/build/artifacts/metadata/entry_spec.rb
+++ b/spec/lib/gitlab/ci/build/artifacts/metadata/entry_spec.rb
@@ -67,6 +67,7 @@ RSpec.describe Gitlab::Ci::Build::Artifacts::Metadata::Entry do
subject { |example| path(example).children }
it { is_expected.to all(be_an_instance_of(described_class)) }
+
it do
is_expected.to contain_exactly entry('path/dir_1/file_1'),
entry('path/dir_1/file_b'),
@@ -79,6 +80,7 @@ RSpec.describe Gitlab::Ci::Build::Artifacts::Metadata::Entry do
it { is_expected.to all(be_file) }
it { is_expected.to all(be_an_instance_of(described_class)) }
+
it do
is_expected.to contain_exactly entry('path/dir_1/file_1'),
entry('path/dir_1/file_b')
@@ -99,6 +101,7 @@ RSpec.describe Gitlab::Ci::Build::Artifacts::Metadata::Entry do
it { is_expected.to all(be_directory) }
it { is_expected.to all(be_an_instance_of(described_class)) }
+
it do
is_expected.to contain_exactly entry('path/dir_1/subdir/'),
entry('path/')
diff --git a/spec/lib/gitlab/ci/build/prerequisite/kubernetes_namespace_spec.rb b/spec/lib/gitlab/ci/build/prerequisite/kubernetes_namespace_spec.rb
index 94c14cfa479..baabab73ea2 100644
--- a/spec/lib/gitlab/ci/build/prerequisite/kubernetes_namespace_spec.rb
+++ b/spec/lib/gitlab/ci/build/prerequisite/kubernetes_namespace_spec.rb
@@ -74,7 +74,7 @@ RSpec.describe Gitlab::Ci::Build::Prerequisite::KubernetesNamespace do
end
context 'kubernetes namespace does not exist' do
- let(:namespace_builder) { double(execute: kubernetes_namespace)}
+ let(:namespace_builder) { double(execute: kubernetes_namespace) }
before do
allow(Clusters::KubernetesNamespaceFinder).to receive(:new)
diff --git a/spec/lib/gitlab/ci/build/releaser_spec.rb b/spec/lib/gitlab/ci/build/releaser_spec.rb
index 435f70e9ac5..ffa7073818a 100644
--- a/spec/lib/gitlab/ci/build/releaser_spec.rb
+++ b/spec/lib/gitlab/ci/build/releaser_spec.rb
@@ -13,6 +13,7 @@ RSpec.describe Gitlab::Ci::Build::Releaser do
name: 'Release $CI_COMMIT_SHA',
description: 'Created using the release-cli $EXTRA_DESCRIPTION',
tag_name: 'release-$CI_COMMIT_SHA',
+ tag_message: 'Annotated tag message',
ref: '$CI_COMMIT_SHA',
milestones: %w[m1 m2 m3],
released_at: '2020-07-15T08:00:00Z',
@@ -27,7 +28,7 @@ RSpec.describe Gitlab::Ci::Build::Releaser do
end
it 'generates the script' do
- expect(subject).to eq(['release-cli create --name "Release $CI_COMMIT_SHA" --description "Created using the release-cli $EXTRA_DESCRIPTION" --tag-name "release-$CI_COMMIT_SHA" --ref "$CI_COMMIT_SHA" --released-at "2020-07-15T08:00:00Z" --milestone "m1" --milestone "m2" --milestone "m3" --assets-link "{\"name\":\"asset1\",\"url\":\"https://example.com/assets/1\",\"link_type\":\"other\",\"filepath\":\"/pretty/asset/1\"}" --assets-link "{\"name\":\"asset2\",\"url\":\"https://example.com/assets/2\"}"'])
+ expect(subject).to eq(['release-cli create --name "Release $CI_COMMIT_SHA" --description "Created using the release-cli $EXTRA_DESCRIPTION" --tag-name "release-$CI_COMMIT_SHA" --tag-message "Annotated tag message" --ref "$CI_COMMIT_SHA" --released-at "2020-07-15T08:00:00Z" --milestone "m1" --milestone "m2" --milestone "m3" --assets-link "{\"name\":\"asset1\",\"url\":\"https://example.com/assets/1\",\"link_type\":\"other\",\"filepath\":\"/pretty/asset/1\"}" --assets-link "{\"name\":\"asset2\",\"url\":\"https://example.com/assets/2\"}"'])
end
end
@@ -39,6 +40,7 @@ RSpec.describe Gitlab::Ci::Build::Releaser do
:name | 'Release $CI_COMMIT_SHA' | 'release-cli create --name "Release $CI_COMMIT_SHA"'
:description | 'Release-cli $EXTRA_DESCRIPTION' | 'release-cli create --description "Release-cli $EXTRA_DESCRIPTION"'
:tag_name | 'release-$CI_COMMIT_SHA' | 'release-cli create --tag-name "release-$CI_COMMIT_SHA"'
+ :tag_message | 'Annotated tag message' | 'release-cli create --tag-message "Annotated tag message"'
:ref | '$CI_COMMIT_SHA' | 'release-cli create --ref "$CI_COMMIT_SHA"'
:milestones | %w[m1 m2 m3] | 'release-cli create --milestone "m1" --milestone "m2" --milestone "m3"'
:released_at | '2020-07-15T08:00:00Z' | 'release-cli create --released-at "2020-07-15T08:00:00Z"'
diff --git a/spec/lib/gitlab/ci/build/rules/rule/clause/changes_spec.rb b/spec/lib/gitlab/ci/build/rules/rule/clause/changes_spec.rb
index 3892b88598a..234ba68d627 100644
--- a/spec/lib/gitlab/ci/build/rules/rule/clause/changes_spec.rb
+++ b/spec/lib/gitlab/ci/build/rules/rule/clause/changes_spec.rb
@@ -4,7 +4,9 @@ require 'spec_helper'
RSpec.describe Gitlab::Ci::Build::Rules::Rule::Clause::Changes do
describe '#satisfied_by?' do
- subject { described_class.new(globs).satisfied_by?(pipeline, context) }
+ let(:context) { instance_double(Gitlab::Ci::Build::Context::Base) }
+
+ subject(:satisfied_by) { described_class.new(globs).satisfied_by?(pipeline, context) }
context 'a glob matching rule' do
using RSpec::Parameterized::TableSyntax
@@ -18,11 +20,9 @@ RSpec.describe Gitlab::Ci::Build::Rules::Rule::Clause::Changes do
# rubocop:disable Layout/LineLength
where(:case_name, :globs, :files, :satisfied) do
- 'exact top-level match' | ['Dockerfile'] | { 'Dockerfile' => '', 'Gemfile' => '' } | true
'exact top-level match' | { paths: ['Dockerfile'] } | { 'Dockerfile' => '', 'Gemfile' => '' } | true
'exact top-level no match' | { paths: ['Dockerfile'] } | { 'Gemfile' => '' } | false
'pattern top-level match' | { paths: ['Docker*'] } | { 'Dockerfile' => '', 'Gemfile' => '' } | true
- 'pattern top-level no match' | ['Docker*'] | { 'Gemfile' => '' } | false
'pattern top-level no match' | { paths: ['Docker*'] } | { 'Gemfile' => '' } | false
'exact nested match' | { paths: ['project/build.properties'] } | { 'project/build.properties' => '' } | true
'exact nested no match' | { paths: ['project/build.properties'] } | { 'project/README.md' => '' } | false
@@ -92,5 +92,97 @@ RSpec.describe Gitlab::Ci::Build::Rules::Rule::Clause::Changes do
it { is_expected.to be_truthy }
end
end
+
+ context 'when using compare_to' do
+ let_it_be(:project) do
+ create(:project, :custom_repo,
+ files: { 'README.md' => 'readme' })
+ end
+
+ let_it_be(:user) { project.owner }
+
+ before_all do
+ project.repository.add_branch(user, 'feature_1', 'master')
+
+ project.repository.create_file(
+ user, 'file1.txt', 'file 1', message: 'Create file1.txt', branch_name: 'feature_1'
+ )
+ project.repository.add_tag(user, 'tag_1', 'feature_1')
+
+ project.repository.create_file(
+ user, 'file2.txt', 'file 2', message: 'Create file2.txt', branch_name: 'feature_1'
+ )
+ project.repository.add_branch(user, 'feature_2', 'feature_1')
+
+ project.repository.update_file(
+ user, 'file2.txt', 'file 2 updated', message: 'Update file2.txt', branch_name: 'feature_2'
+ )
+ end
+
+ context 'when compare_to is branch or tag' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:pipeline_ref, :compare_to, :paths, :ff, :result) do
+ 'feature_1' | 'master' | ['file1.txt'] | true | true
+ 'feature_1' | 'master' | ['README.md'] | true | false
+ 'feature_1' | 'master' | ['xyz.md'] | true | false
+ 'feature_2' | 'master' | ['file1.txt'] | true | true
+ 'feature_2' | 'master' | ['file2.txt'] | true | true
+ 'feature_2' | 'feature_1' | ['file1.txt'] | true | false
+ 'feature_2' | 'feature_1' | ['file1.txt'] | false | true
+ 'feature_2' | 'feature_1' | ['file2.txt'] | true | true
+ 'feature_1' | 'tag_1' | ['file1.txt'] | true | false
+ 'feature_1' | 'tag_1' | ['file1.txt'] | false | true
+ 'feature_1' | 'tag_1' | ['file2.txt'] | true | true
+ 'feature_2' | 'tag_1' | ['file2.txt'] | true | true
+ end
+
+ with_them do
+ let(:globs) { { paths: paths, compare_to: compare_to } }
+
+ let(:pipeline) do
+ build(:ci_pipeline, project: project, ref: pipeline_ref, sha: project.commit(pipeline_ref).sha)
+ end
+
+ before do
+ stub_feature_flags(ci_rules_changes_compare: ff)
+ end
+
+ it { is_expected.to eq(result) }
+ end
+ end
+
+ context 'when compare_to is a sha' do
+ let(:globs) { { paths: ['file2.txt'], compare_to: project.commit('tag_1').sha } }
+
+ let(:pipeline) do
+ build(:ci_pipeline, project: project, ref: 'feature_2', sha: project.commit('feature_2').sha)
+ end
+
+ it { is_expected.to be_truthy }
+ end
+
+ context 'when compare_to is not a valid ref' do
+ let(:globs) { { paths: ['file1.txt'], compare_to: 'xyz' } }
+
+ let(:pipeline) do
+ build(:ci_pipeline, project: project, ref: 'feature_2', sha: project.commit('feature_2').sha)
+ end
+
+ it 'raises ParseError' do
+ expect { satisfied_by }.to raise_error(
+ ::Gitlab::Ci::Build::Rules::Rule::Clause::ParseError, 'rules:changes:compare_to is not a valid ref'
+ )
+ end
+
+ context 'when the FF ci_rules_changes_compare is disabled' do
+ before do
+ stub_feature_flags(ci_rules_changes_compare: false)
+ end
+
+ it { is_expected.to be_truthy }
+ end
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/ci/build/rules/rule/clause/if_spec.rb b/spec/lib/gitlab/ci/build/rules/rule/clause/if_spec.rb
index 81bce989833..31c7437cfe0 100644
--- a/spec/lib/gitlab/ci/build/rules/rule/clause/if_spec.rb
+++ b/spec/lib/gitlab/ci/build/rules/rule/clause/if_spec.rb
@@ -51,14 +51,6 @@ RSpec.describe Gitlab::Ci::Build::Rules::Rule::Clause::If do
end
it { is_expected.to eq(true) }
-
- context 'when the FF ci_fix_rules_if_comparison_with_regexp_variable is disabled' do
- before do
- stub_feature_flags(ci_fix_rules_if_comparison_with_regexp_variable: false)
- end
-
- it { is_expected.to eq(false) }
- end
end
context 'when comparison is false' do
diff --git a/spec/lib/gitlab/ci/config/entry/image_spec.rb b/spec/lib/gitlab/ci/config/entry/image_spec.rb
index 0fa6d4f8804..6121c28070f 100644
--- a/spec/lib/gitlab/ci/config/entry/image_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/image_spec.rb
@@ -1,12 +1,8 @@
# frozen_string_literal: true
-require 'fast_spec_helper'
-require 'support/helpers/stubbed_feature'
-require 'support/helpers/stub_feature_flags'
+require 'spec_helper'
RSpec.describe Gitlab::Ci::Config::Entry::Image do
- include StubFeatureFlags
-
before do
stub_feature_flags(ci_docker_image_pull_policy: true)
diff --git a/spec/lib/gitlab/ci/config/entry/imageable_spec.rb b/spec/lib/gitlab/ci/config/entry/imageable_spec.rb
new file mode 100644
index 00000000000..88f8e260611
--- /dev/null
+++ b/spec/lib/gitlab/ci/config/entry/imageable_spec.rb
@@ -0,0 +1,81 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Ci::Config::Entry::Imageable do
+ let(:node_class) do
+ Class.new(::Gitlab::Config::Entry::Node) do
+ include ::Gitlab::Ci::Config::Entry::Imageable
+
+ validations do
+ validates :config, allowed_keys: ::Gitlab::Ci::Config::Entry::Imageable::IMAGEABLE_ALLOWED_KEYS
+ end
+
+ def self.name
+ 'node'
+ end
+
+ def value
+ if string?
+ { name: @config }
+ elsif hash?
+ {
+ name: @config[:name]
+ }.compact
+ else
+ {}
+ end
+ end
+ end
+ end
+
+ subject(:entry) { node_class.new(config) }
+
+ before do
+ entry.compose!
+ end
+
+ context 'when entry value is correct' do
+ let(:config) { 'image:1.0' }
+
+ describe '#valid?' do
+ it 'is valid' do
+ expect(entry).to be_valid
+ end
+ end
+ end
+
+ context 'when entry value is not correct' do
+ let(:config) { ['image:1.0'] }
+
+ describe '#errors' do
+ it 'saves errors' do
+ expect(entry.errors.first)
+ .to match /config should be a hash or a string/
+ end
+ end
+
+ describe '#valid?' do
+ it 'is not valid' do
+ expect(entry).not_to be_valid
+ end
+ end
+ end
+
+ context 'when unexpected key is specified' do
+ let(:config) { { name: 'image:1.0', non_existing: 'test' } }
+
+ describe '#errors' do
+ it 'saves errors' do
+ expect(entry.errors.first)
+ .to match /config contains unknown keys: non_existing/
+ end
+ end
+
+ describe '#valid?' do
+ it 'is not valid' do
+ expect(entry).not_to be_valid
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/config/entry/processable_spec.rb b/spec/lib/gitlab/ci/config/entry/processable_spec.rb
index 5b9337ede34..714b0a3b6aa 100644
--- a/spec/lib/gitlab/ci/config/entry/processable_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/processable_spec.rb
@@ -212,7 +212,7 @@ RSpec.describe Gitlab::Ci::Config::Entry::Processable do
let(:unspecified) { double('unspecified', 'specified?' => false) }
let(:default) { double('default', '[]' => unspecified) }
let(:workflow) { double('workflow', 'has_rules?' => false) }
- let(:variables) { }
+ let(:variables) {}
let(:deps) do
double('deps',
diff --git a/spec/lib/gitlab/ci/config/entry/release_spec.rb b/spec/lib/gitlab/ci/config/entry/release_spec.rb
index e5155f91be4..7b6b31ca748 100644
--- a/spec/lib/gitlab/ci/config/entry/release_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/release_spec.rb
@@ -128,25 +128,25 @@ RSpec.describe Gitlab::Ci::Config::Entry::Release do
end
context "when 'ref' is a short commit SHA" do
- let(:ref) { 'b3235930'}
+ let(:ref) { 'b3235930' }
it_behaves_like 'a valid entry'
end
context "when 'ref' is a branch name" do
- let(:ref) { 'fix/123-branch-name'}
+ let(:ref) { 'fix/123-branch-name' }
it_behaves_like 'a valid entry'
end
context "when 'ref' is a semantic versioning tag" do
- let(:ref) { 'v1.2.3'}
+ let(:ref) { 'v1.2.3' }
it_behaves_like 'a valid entry'
end
context "when 'ref' is a semantic versioning tag rc" do
- let(:ref) { 'v1.2.3-rc'}
+ let(:ref) { 'v1.2.3-rc' }
it_behaves_like 'a valid entry'
end
@@ -188,6 +188,30 @@ RSpec.describe Gitlab::Ci::Config::Entry::Release do
end
end
+ context "when value includes 'tag_message' keyword" do
+ let(:config) do
+ {
+ tag_name: 'v0.06',
+ description: "./release_changelog.txt",
+ tag_message: "Annotated tag message"
+ }
+ end
+
+ it_behaves_like 'a valid entry'
+ end
+
+ context "when 'tag_message' is nil" do
+ let(:config) do
+ {
+ tag_name: 'v0.06',
+ description: "./release_changelog.txt",
+ tag_message: nil
+ }
+ end
+
+ it_behaves_like 'a valid entry'
+ end
+
context 'when entry value is not correct' do
describe '#errors' do
context 'when value of attribute is invalid' do
@@ -231,6 +255,12 @@ RSpec.describe Gitlab::Ci::Config::Entry::Release do
it_behaves_like 'reports error', 'release milestones should be an array of strings or a string'
end
+
+ context 'when `tag_message` is not a string' do
+ let(:config) { { tag_message: 100 } }
+
+ it_behaves_like 'reports error', 'release tag message should be a string'
+ end
end
end
end
diff --git a/spec/lib/gitlab/ci/config/entry/reports_spec.rb b/spec/lib/gitlab/ci/config/entry/reports_spec.rb
index 051cccb4833..45aa859a356 100644
--- a/spec/lib/gitlab/ci/config/entry/reports_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/reports_spec.rb
@@ -47,6 +47,7 @@ RSpec.describe Gitlab::Ci::Config::Entry::Reports do
:dotenv | 'build.dotenv'
:terraform | 'tfplan.json'
:accessibility | 'gl-accessibility.json'
+ :cyclonedx | 'gl-sbom.cdx.zip'
end
with_them do
diff --git a/spec/lib/gitlab/ci/config/entry/root_spec.rb b/spec/lib/gitlab/ci/config/entry/root_spec.rb
index 55ad119ea21..1f8543227c9 100644
--- a/spec/lib/gitlab/ci/config/entry/root_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/root_spec.rb
@@ -155,7 +155,7 @@ RSpec.describe Gitlab::Ci::Config::Entry::Root do
services: [{ name: "postgres:9.1" }, { name: "mysql:5.5" }],
cache: [{ key: "k", untracked: true, paths: ["public/"], policy: "pull-push", when: 'on_success' }],
only: { refs: %w(branches tags) },
- job_variables: { 'VAR' => 'job' },
+ job_variables: { 'VAR' => { value: 'job' } },
root_variables_inheritance: true,
after_script: [],
ignore: false,
@@ -215,7 +215,7 @@ RSpec.describe Gitlab::Ci::Config::Entry::Root do
services: [{ name: 'postgres:9.1' }, { name: 'mysql:5.5' }],
stage: 'test',
cache: [{ key: 'k', untracked: true, paths: ['public/'], policy: 'pull-push', when: 'on_success' }],
- job_variables: { 'VAR' => 'job' },
+ job_variables: { 'VAR' => { value: 'job' } },
root_variables_inheritance: true,
ignore: false,
after_script: ['make clean'],
diff --git a/spec/lib/gitlab/ci/config/entry/rules/rule/changes_spec.rb b/spec/lib/gitlab/ci/config/entry/rules/rule/changes_spec.rb
index 295561b3c4d..64f0a64074c 100644
--- a/spec/lib/gitlab/ci/config/entry/rules/rule/changes_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/rules/rule/changes_spec.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-require 'fast_spec_helper'
+require 'spec_helper'
RSpec.describe Gitlab::Ci::Config::Entry::Rules::Rule::Changes do
let(:factory) do
@@ -119,6 +119,23 @@ RSpec.describe Gitlab::Ci::Config::Entry::Rules::Rule::Changes do
end
end
end
+
+ context 'with paths and compare_to' do
+ let(:config) { { paths: %w[app/ lib/], compare_to: 'branch1' } }
+
+ it { is_expected.to be_valid }
+
+ context 'when compare_to is not a string' do
+ let(:config) { { paths: %w[app/ lib/], compare_to: 1 } }
+
+ it { is_expected.not_to be_valid }
+
+ it 'returns information about errors' do
+ expect(entry.errors)
+ .to include(/should be a string/)
+ end
+ end
+ end
end
describe '#value' do
@@ -137,5 +154,13 @@ RSpec.describe Gitlab::Ci::Config::Entry::Rules::Rule::Changes do
it { is_expected.to eq(config) }
end
+
+ context 'with paths and compare_to' do
+ let(:config) do
+ { paths: ['app/', 'lib/'], compare_to: 'branch1' }
+ end
+
+ it { is_expected.to eq(config) }
+ end
end
end
diff --git a/spec/lib/gitlab/ci/config/entry/rules/rule_spec.rb b/spec/lib/gitlab/ci/config/entry/rules/rule_spec.rb
index 93f4a66bfb6..c85fe366da6 100644
--- a/spec/lib/gitlab/ci/config/entry/rules/rule_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/rules/rule_spec.rb
@@ -2,7 +2,6 @@
require 'fast_spec_helper'
require 'gitlab_chronic_duration'
-require 'support/helpers/stub_feature_flags'
require_dependency 'active_model'
RSpec.describe Gitlab::Ci::Config::Entry::Rules::Rule do
@@ -418,6 +417,12 @@ RSpec.describe Gitlab::Ci::Config::Entry::Rules::Rule do
it { is_expected.to eq(config) }
end
+
+ context 'when using changes with paths and compare_to' do
+ let(:config) { { changes: { paths: %w[app/ lib/ spec/ other/* paths/**/*.rb], compare_to: 'branch1' } } }
+
+ it { is_expected.to eq(config) }
+ end
end
context 'when default value has been provided' do
diff --git a/spec/lib/gitlab/ci/config/entry/service_spec.rb b/spec/lib/gitlab/ci/config/entry/service_spec.rb
index 3c000fd09ed..821ab442d61 100644
--- a/spec/lib/gitlab/ci/config/entry/service_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/service_spec.rb
@@ -1,12 +1,8 @@
# frozen_string_literal: true
-require 'fast_spec_helper'
-require 'support/helpers/stubbed_feature'
-require 'support/helpers/stub_feature_flags'
+require 'spec_helper'
RSpec.describe Gitlab::Ci::Config::Entry::Service do
- include StubFeatureFlags
-
before do
stub_feature_flags(ci_docker_image_pull_policy: true)
entry.compose!
diff --git a/spec/lib/gitlab/ci/config/entry/tags_spec.rb b/spec/lib/gitlab/ci/config/entry/tags_spec.rb
index e05d4ae52b2..24efd08c855 100644
--- a/spec/lib/gitlab/ci/config/entry/tags_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/tags_spec.rb
@@ -34,7 +34,7 @@ RSpec.describe Gitlab::Ci::Config::Entry::Tags do
end
context 'when tags limit is reached' do
- let(:config) { Array.new(50) {|i| "tag-#{i}" } }
+ let(:config) { Array.new(50) { |i| "tag-#{i}" } }
it 'reports error' do
expect(entry.errors)
diff --git a/spec/lib/gitlab/ci/config/external/file/base_spec.rb b/spec/lib/gitlab/ci/config/external/file/base_spec.rb
index 280bebe1a7c..1306d61d99c 100644
--- a/spec/lib/gitlab/ci/config/external/file/base_spec.rb
+++ b/spec/lib/gitlab/ci/config/external/file/base_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe Gitlab::Ci::Config::External::File::Base do
- let(:variables) { }
+ let(:variables) {}
let(:context_params) { { sha: 'HEAD', variables: variables } }
let(:context) { Gitlab::Ci::Config::External::Context.new(**context_params) }
@@ -100,7 +100,7 @@ RSpec.describe Gitlab::Ci::Config::External::File::Base do
describe '#to_hash' do
context 'with includes' do
let(:location) { 'some/file/config.yml' }
- let(:content) { 'include: { template: Bash.gitlab-ci.yml }'}
+ let(:content) { 'include: { template: Bash.gitlab-ci.yml }' }
before do
allow_any_instance_of(test_class)
diff --git a/spec/lib/gitlab/ci/config/external/file/local_spec.rb b/spec/lib/gitlab/ci/config/external/file/local_spec.rb
index 0e78498c98e..f5b36ebfa45 100644
--- a/spec/lib/gitlab/ci/config/external/file/local_spec.rb
+++ b/spec/lib/gitlab/ci/config/external/file/local_spec.rb
@@ -167,7 +167,7 @@ RSpec.describe Gitlab::Ci::Config::External::File::Local do
describe '#to_hash' do
context 'properly includes another local file in the same repository' do
let(:location) { 'some/file/config.yml' }
- let(:content) { 'include: { local: another-config.yml }'}
+ let(:content) { 'include: { local: another-config.yml }' }
let(:another_location) { 'another-config.yml' }
let(:another_content) { 'rspec: JOB' }
diff --git a/spec/lib/gitlab/ci/config/external/file/remote_spec.rb b/spec/lib/gitlab/ci/config/external/file/remote_spec.rb
index 3e1c4df4e32..45dfea636f3 100644
--- a/spec/lib/gitlab/ci/config/external/file/remote_spec.rb
+++ b/spec/lib/gitlab/ci/config/external/file/remote_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe Gitlab::Ci::Config::External::File::Remote do
include StubRequests
- let(:variables) {Gitlab::Ci::Variables::Collection.new([{ 'key' => 'GITLAB_TOKEN', 'value' => 'secret_file', 'masked' => true }]) }
+ let(:variables) { Gitlab::Ci::Variables::Collection.new([{ 'key' => 'GITLAB_TOKEN', 'value' => 'secret_file', 'masked' => true }]) }
let(:context_params) { { sha: '12345', variables: variables } }
let(:context) { Gitlab::Ci::Config::External::Context.new(**context_params) }
let(:params) { { remote: location } }
diff --git a/spec/lib/gitlab/ci/config/normalizer_spec.rb b/spec/lib/gitlab/ci/config/normalizer_spec.rb
index 354392eb42e..96ca5d98a6e 100644
--- a/spec/lib/gitlab/ci/config/normalizer_spec.rb
+++ b/spec/lib/gitlab/ci/config/normalizer_spec.rb
@@ -232,7 +232,7 @@ RSpec.describe Gitlab::Ci::Config::Normalizer do
context 'when parallel config does not matches a factory' do
let(:variables_config) { {} }
- let(:parallel_config) { }
+ let(:parallel_config) {}
it 'does not alter the job config' do
is_expected.to match(config)
diff --git a/spec/lib/gitlab/ci/config_spec.rb b/spec/lib/gitlab/ci/config_spec.rb
index 5eb04d969eb..055114769ea 100644
--- a/spec/lib/gitlab/ci/config_spec.rb
+++ b/spec/lib/gitlab/ci/config_spec.rb
@@ -872,4 +872,21 @@ RSpec.describe Gitlab::Ci::Config do
end
end
end
+
+ describe '#workflow_rules' do
+ subject(:workflow_rules) { config.workflow_rules }
+
+ let(:yml) do
+ <<-EOS
+ workflow:
+ rules:
+ - if: $CI_COMMIT_REF_NAME == "master"
+
+ rspec:
+ script: exit 0
+ EOS
+ end
+
+ it { is_expected.to eq([{ if: '$CI_COMMIT_REF_NAME == "master"' }]) }
+ end
end
diff --git a/spec/lib/gitlab/ci/cron_parser_spec.rb b/spec/lib/gitlab/ci/cron_parser_spec.rb
index 4017accb462..33474865a93 100644
--- a/spec/lib/gitlab/ci/cron_parser_spec.rb
+++ b/spec/lib/gitlab/ci/cron_parser_spec.rb
@@ -178,7 +178,7 @@ RSpec.describe Gitlab::Ci::CronParser do
end
context 'when time crosses a Daylight Savings boundary' do
- let(:cron) { '* 0 1 12 *'}
+ let(:cron) { '* 0 1 12 *' }
# Note this previously only failed if the time zone is set
# to a zone that observes Daylight Savings
diff --git a/spec/lib/gitlab/ci/parsers/sbom/cyclonedx_properties_spec.rb b/spec/lib/gitlab/ci/parsers/sbom/cyclonedx_properties_spec.rb
new file mode 100644
index 00000000000..c99cfa94aa6
--- /dev/null
+++ b/spec/lib/gitlab/ci/parsers/sbom/cyclonedx_properties_spec.rb
@@ -0,0 +1,88 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Ci::Parsers::Sbom::CyclonedxProperties do
+ subject(:parse_source) { described_class.parse_source(properties) }
+
+ context 'when properties are nil' do
+ let(:properties) { nil }
+
+ it { is_expected.to be_nil }
+ end
+
+ context 'when report does not have gitlab properties' do
+ let(:properties) { ['name' => 'foo', 'value' => 'bar'] }
+
+ it { is_expected.to be_nil }
+ end
+
+ context 'when schema_version is missing' do
+ let(:properties) do
+ [
+ { 'name' => 'gitlab:dependency_scanning:dependency_file', 'value' => 'package-lock.json' },
+ { 'name' => 'gitlab:dependency_scanning:package_manager_name', 'value' => 'npm' },
+ { 'name' => 'gitlab:dependency_scanning:language', 'value' => 'JavaScript' }
+ ]
+ end
+
+ it { is_expected.to be_nil }
+ end
+
+ context 'when schema version is unsupported' do
+ let(:properties) do
+ [
+ { 'name' => 'gitlab:meta:schema_version', 'value' => '2' },
+ { 'name' => 'gitlab:dependency_scanning:dependency_file', 'value' => 'package-lock.json' },
+ { 'name' => 'gitlab:dependency_scanning:package_manager_name', 'value' => 'npm' },
+ { 'name' => 'gitlab:dependency_scanning:language', 'value' => 'JavaScript' }
+ ]
+ end
+
+ it { is_expected.to be_nil }
+ end
+
+ context 'when no dependency_scanning properties are present' do
+ let(:properties) do
+ [
+ { 'name' => 'gitlab:meta:schema_version', 'value' => '1' }
+ ]
+ end
+
+ it 'does not call dependency_scanning parser' do
+ expect(Gitlab::Ci::Parsers::Sbom::Source::DependencyScanning).not_to receive(:parse_source)
+
+ parse_source
+ end
+ end
+
+ context 'when dependency_scanning properties are present' do
+ let(:properties) do
+ [
+ { 'name' => 'gitlab:meta:schema_version', 'value' => '1' },
+ { 'name' => 'gitlab:dependency_scanning:category', 'value' => 'development' },
+ { 'name' => 'gitlab:dependency_scanning:input_file:path', 'value' => 'package-lock.json' },
+ { 'name' => 'gitlab:dependency_scanning:source_file:path', 'value' => 'package.json' },
+ { 'name' => 'gitlab:dependency_scanning:package_manager:name', 'value' => 'npm' },
+ { 'name' => 'gitlab:dependency_scanning:language:name', 'value' => 'JavaScript' },
+ { 'name' => 'gitlab:dependency_scanning:unsupported_property', 'value' => 'Should be ignored' }
+ ]
+ end
+
+ let(:expected_input) do
+ {
+ 'category' => 'development',
+ 'input_file' => { 'path' => 'package-lock.json' },
+ 'source_file' => { 'path' => 'package.json' },
+ 'package_manager' => { 'name' => 'npm' },
+ 'language' => { 'name' => 'JavaScript' }
+ }
+ end
+
+ it 'passes only supported properties to the dependency scanning parser' do
+ expect(Gitlab::Ci::Parsers::Sbom::Source::DependencyScanning).to receive(:source).with(expected_input)
+
+ parse_source
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/parsers/sbom/cyclonedx_spec.rb b/spec/lib/gitlab/ci/parsers/sbom/cyclonedx_spec.rb
new file mode 100644
index 00000000000..431fe9f3591
--- /dev/null
+++ b/spec/lib/gitlab/ci/parsers/sbom/cyclonedx_spec.rb
@@ -0,0 +1,135 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Ci::Parsers::Sbom::Cyclonedx do
+ let(:report) { instance_double('Gitlab::Ci::Reports::Sbom::Report') }
+ let(:report_data) { base_report_data }
+ let(:raw_report_data) { report_data.to_json }
+ let(:report_valid?) { true }
+ let(:validator_errors) { [] }
+ let(:properties_parser) { class_double('Gitlab::Ci::Parsers::Sbom::CyclonedxProperties') }
+
+ let(:base_report_data) do
+ {
+ 'bomFormat' => 'CycloneDX',
+ 'specVersion' => '1.4',
+ 'version' => 1
+ }
+ end
+
+ subject(:parse!) { described_class.new.parse!(raw_report_data, report) }
+
+ before do
+ allow_next_instance_of(Gitlab::Ci::Parsers::Sbom::Validators::CyclonedxSchemaValidator) do |validator|
+ allow(validator).to receive(:valid?).and_return(report_valid?)
+ allow(validator).to receive(:errors).and_return(validator_errors)
+ end
+
+ allow(properties_parser).to receive(:parse_source)
+ stub_const('Gitlab::Ci::Parsers::Sbom::CyclonedxProperties', properties_parser)
+ end
+
+ context 'when report JSON is invalid' do
+ let(:raw_report_data) { '{ ' }
+
+ it 'handles errors and adds them to the report' do
+ expect(report).to receive(:add_error).with(a_string_including("Report JSON is invalid:"))
+
+ expect { parse! }.not_to raise_error
+ end
+ end
+
+ context 'when report uses an unsupported spec version' do
+ let(:report_data) { base_report_data.merge({ 'specVersion' => '1.3' }) }
+
+ it 'reports unsupported version as an error' do
+ expect(report).to receive(:add_error).with("Unsupported CycloneDX spec version. Must be one of: 1.4")
+
+ parse!
+ end
+ end
+
+ context 'when report does not conform to the CycloneDX schema' do
+ let(:report_valid?) { false }
+ let(:validator_errors) { %w[error1 error2] }
+
+ it 'reports all errors returned by the validator' do
+ expect(report).to receive(:add_error).with("error1")
+ expect(report).to receive(:add_error).with("error2")
+
+ parse!
+ end
+ end
+
+ context 'when cyclonedx report has no components' do
+ it 'skips component processing' do
+ expect(report).not_to receive(:add_component)
+
+ parse!
+ end
+ end
+
+ context 'when report has components' do
+ let(:report_data) { base_report_data.merge({ 'components' => components }) }
+ let(:components) do
+ [
+ {
+ "name" => "activesupport",
+ "version" => "5.1.4",
+ "purl" => "pkg:gem/activesupport@5.1.4",
+ "type" => "library",
+ "bom-ref" => "pkg:gem/activesupport@5.1.4"
+ },
+ {
+ "name" => "byebug",
+ "version" => "10.0.0",
+ "purl" => "pkg:gem/byebug@10.0.0",
+ "type" => "library",
+ "bom-ref" => "pkg:gem/byebug@10.0.0"
+ },
+ {
+ "name" => "minimal-component",
+ "type" => "library"
+ },
+ {
+ # Should be skipped
+ "name" => "unrecognized-type",
+ "type" => "unknown"
+ }
+ ]
+ end
+
+ it 'adds each component, ignoring unused attributes' do
+ expect(report).to receive(:add_component)
+ .with({ "name" => "activesupport", "version" => "5.1.4", "type" => "library" })
+ expect(report).to receive(:add_component)
+ .with({ "name" => "byebug", "version" => "10.0.0", "type" => "library" })
+ expect(report).to receive(:add_component)
+ .with({ "name" => "minimal-component", "type" => "library" })
+
+ parse!
+ end
+ end
+
+ context 'when report has metadata properties' do
+ let(:report_data) { base_report_data.merge({ 'metadata' => { 'properties' => properties } }) }
+
+ let(:properties) do
+ [
+ { 'name' => 'gitlab:meta:schema_version', 'value' => '1' },
+ { 'name' => 'gitlab:dependency_scanning:category', 'value' => 'development' },
+ { 'name' => 'gitlab:dependency_scanning:input_file:path', 'value' => 'package-lock.json' },
+ { 'name' => 'gitlab:dependency_scanning:source_file:path', 'value' => 'package.json' },
+ { 'name' => 'gitlab:dependency_scanning:package_manager:name', 'value' => 'npm' },
+ { 'name' => 'gitlab:dependency_scanning:language:name', 'value' => 'JavaScript' }
+ ]
+ end
+
+ it 'passes them to the properties parser' do
+ expect(properties_parser).to receive(:parse_source).with(properties)
+
+ parse!
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/parsers/sbom/source/dependency_scanning_spec.rb b/spec/lib/gitlab/ci/parsers/sbom/source/dependency_scanning_spec.rb
new file mode 100644
index 00000000000..30114b17cac
--- /dev/null
+++ b/spec/lib/gitlab/ci/parsers/sbom/source/dependency_scanning_spec.rb
@@ -0,0 +1,40 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Ci::Parsers::Sbom::Source::DependencyScanning do
+ subject { described_class.source(property_data) }
+
+ context 'when all property data is present' do
+ let(:property_data) do
+ {
+ 'category' => 'development',
+ 'input_file' => { 'path' => 'package-lock.json' },
+ 'source_file' => { 'path' => 'package.json' },
+ 'package_manager' => { 'name' => 'npm' },
+ 'language' => { 'name' => 'JavaScript' }
+ }
+ end
+
+ it 'returns expected source data' do
+ is_expected.to eq({
+ 'type' => :dependency_scanning,
+ 'data' => property_data,
+ 'fingerprint' => '4dbcb747e6f0fb3ed4f48d96b777f1d64acdf43e459fdfefad404e55c004a188'
+ })
+ end
+ end
+
+ context 'when required properties are missing' do
+ let(:property_data) do
+ {
+ 'category' => 'development',
+ 'source_file' => { 'path' => 'package.json' },
+ 'package_manager' => { 'name' => 'npm' },
+ 'language' => { 'name' => 'JavaScript' }
+ }
+ end
+
+ it { is_expected.to be_nil }
+ end
+end
diff --git a/spec/lib/gitlab/ci/parsers/sbom/validators/cyclonedx_schema_validator_spec.rb b/spec/lib/gitlab/ci/parsers/sbom/validators/cyclonedx_schema_validator_spec.rb
new file mode 100644
index 00000000000..c54a3268bbe
--- /dev/null
+++ b/spec/lib/gitlab/ci/parsers/sbom/validators/cyclonedx_schema_validator_spec.rb
@@ -0,0 +1,132 @@
+# frozen_string_literal: true
+
+require "spec_helper"
+
+RSpec.describe Gitlab::Ci::Parsers::Sbom::Validators::CyclonedxSchemaValidator do
+ # Reports should be valid or invalid according to the specification at
+ # https://cyclonedx.org/docs/1.4/json/
+
+ subject(:validator) { described_class.new(report_data) }
+
+ let_it_be(:required_attributes) do
+ {
+ "bomFormat" => "CycloneDX",
+ "specVersion" => "1.4",
+ "version" => 1
+ }
+ end
+
+ context "with minimally valid report" do
+ let_it_be(:report_data) { required_attributes }
+
+ it { is_expected.to be_valid }
+ end
+
+ context "when report has components" do
+ let(:report_data) { required_attributes.merge({ "components" => components }) }
+
+ context "with minimally valid components" do
+ let(:components) do
+ [
+ {
+ "type" => "library",
+ "name" => "activesupport"
+ },
+ {
+ "type" => "library",
+ "name" => "byebug"
+ }
+ ]
+ end
+
+ it { is_expected.to be_valid }
+ end
+
+ context "when components have versions" do
+ let(:components) do
+ [
+ {
+ "type" => "library",
+ "name" => "activesupport",
+ "version" => "5.1.4"
+ },
+ {
+ "type" => "library",
+ "name" => "byebug",
+ "version" => "10.0.0"
+ }
+ ]
+ end
+
+ it { is_expected.to be_valid }
+ end
+
+ context "when components are not valid" do
+ let(:components) do
+ [
+ { "type" => "foo" },
+ { "name" => "activesupport" }
+ ]
+ end
+
+ it { is_expected.not_to be_valid }
+
+ it "outputs errors for each validation failure" do
+ expect(validator.errors).to match_array([
+ "property '/components/0' is missing required keys: name",
+ "property '/components/0/type' is not one of: [\"application\", \"framework\"," \
+ " \"library\", \"container\", \"operating-system\", \"device\", \"firmware\", \"file\"]",
+ "property '/components/1' is missing required keys: type"
+ ])
+ end
+ end
+ end
+
+ context "when report has metadata" do
+ let(:metadata) do
+ {
+ "timestamp" => "2022-02-23T08:02:39Z",
+ "tools" => [{ "vendor" => "GitLab", "name" => "Gemnasium", "version" => "2.34.0" }],
+ "authors" => [{ "name" => "GitLab", "email" => "support@gitlab.com" }]
+ }
+ end
+
+ let(:report_data) { required_attributes.merge({ "metadata" => metadata }) }
+
+ it { is_expected.to be_valid }
+
+ context "when metadata has properties" do
+ before do
+ metadata.merge!({ "properties" => properties })
+ end
+
+ context "when properties are valid" do
+ let(:properties) do
+ [
+ { "name" => "gitlab:dependency_scanning:input_file", "value" => "Gemfile.lock" },
+ { "name" => "gitlab:dependency_scanning:package_manager", "value" => "bundler" }
+ ]
+ end
+
+ it { is_expected.to be_valid }
+ end
+
+ context "when properties are invalid" do
+ let(:properties) do
+ [
+ { "name" => ["gitlab:meta:schema_version"], "value" => 1 }
+ ]
+ end
+
+ it { is_expected.not_to be_valid }
+
+ it "outputs errors for each validation failure" do
+ expect(validator.errors).to match_array([
+ "property '/metadata/properties/0/name' is not of type: string",
+ "property '/metadata/properties/0/value' is not of type: string"
+ ])
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/parsers/security/validators/schema_validator_spec.rb b/spec/lib/gitlab/ci/parsers/security/validators/schema_validator_spec.rb
index d06077d69b6..7828aa99f6a 100644
--- a/spec/lib/gitlab/ci/parsers/security/validators/schema_validator_spec.rb
+++ b/spec/lib/gitlab/ci/parsers/security/validators/schema_validator_spec.rb
@@ -6,6 +6,10 @@ RSpec.describe Gitlab::Ci::Parsers::Security::Validators::SchemaValidator do
let_it_be(:project) { create(:project) }
let(:supported_dast_versions) { described_class::SUPPORTED_VERSIONS[:dast].join(', ') }
+ let(:deprecated_schema_version_message) {}
+ let(:missing_schema_version_message) do
+ "Report version not provided, dast report type supports versions: #{supported_dast_versions}"
+ end
let(:scanner) do
{
@@ -24,7 +28,7 @@ RSpec.describe Gitlab::Ci::Parsers::Security::Validators::SchemaValidator do
expect(described_class::SUPPORTED_VERSIONS.keys).to eq(described_class::DEPRECATED_VERSIONS.keys)
end
- context 'when a schema JSON file exists for a particular report type version' do
+ context 'when all files under schema path are explicitly listed' do
# We only care about the part that comes before report-format.json
# https://rubular.com/r/N8Juz7r8hYDYgD
filename_regex = /(?<report_type>[-\w]*)\-report-format.json/
@@ -38,7 +42,7 @@ RSpec.describe Gitlab::Ci::Parsers::Security::Validators::SchemaValidator do
matches = filename_regex.match(file)
report_type = matches[:report_type].tr("-", "_").to_sym
- it "#{report_type} #{version} is in the constant" do
+ it "#{report_type} #{version}" do
expect(described_class::SUPPORTED_VERSIONS[report_type]).to include(version)
end
end
@@ -64,11 +68,54 @@ RSpec.describe Gitlab::Ci::Parsers::Security::Validators::SchemaValidator do
describe '#valid?' do
subject { validator.valid? }
+ context 'when given a supported MAJOR.MINOR schema version' do
+ let(:report_type) { :dast }
+ let(:report_version) do
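+ # Take the MAJOR.MINOR of the latest vendored schema version and append an unvendored PATCH ("34")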
+ latest_vendored_version = described_class::SUPPORTED_VERSIONS[report_type].last.split(".")
+ (latest_vendored_version[0...2] << "34").join(".")
+ end
+
+ context 'and the report is valid' do
+ let(:report_data) do
+ {
+ 'version' => report_version,
+ 'vulnerabilities' => []
+ }
+ end
+
+ it { is_expected.to be_truthy }
+ end
+
+ context 'and the report is invalid' do
+ let(:report_data) do
+ {
+ 'version' => report_version
+ }
+ end
+
+ it { is_expected.to be_falsey }
+
+ it 'logs related information' do
+ expect(Gitlab::AppLogger).to receive(:info).with(
+ message: "security report schema validation problem",
+ security_report_type: report_type,
+ security_report_version: report_version,
+ project_id: project.id,
+ security_report_failure: 'schema_validation_fails',
+ security_report_scanner_id: 'gemnasium',
+ security_report_scanner_version: '2.1.0'
+ )
+
+ subject
+ end
+ end
+ end
+
context 'when given a supported schema version' do
let(:report_type) { :dast }
let(:report_version) { described_class::SUPPORTED_VERSIONS[report_type].last }
- context 'when the report is valid' do
+ context 'and the report is valid' do
let(:report_data) do
{
'version' => report_version,
@@ -79,7 +126,7 @@ RSpec.describe Gitlab::Ci::Parsers::Security::Validators::SchemaValidator do
it { is_expected.to be_truthy }
end
- context 'when the report is invalid' do
+ context 'and the report is invalid' do
let(:report_data) do
{
'version' => report_version
@@ -118,7 +165,7 @@ RSpec.describe Gitlab::Ci::Parsers::Security::Validators::SchemaValidator do
stub_const("#{described_class}::DEPRECATED_VERSIONS", deprecations_hash)
end
- context 'when the report passes schema validation' do
+ context 'and the report passes schema validation' do
let(:report_data) do
{
'version' => '10.0.0',
@@ -143,34 +190,14 @@ RSpec.describe Gitlab::Ci::Parsers::Security::Validators::SchemaValidator do
end
end
- context 'when the report does not pass schema validation' do
- context 'when enforce_security_report_validation is enabled' do
- before do
- stub_feature_flags(enforce_security_report_validation: true)
- end
-
- let(:report_data) do
- {
- 'version' => 'V2.7.0'
- }
- end
-
- it { is_expected.to be_falsey }
+ context 'and the report does not pass schema validation' do
+ let(:report_data) do
+ {
+ 'version' => 'V2.7.0'
+ }
end
- context 'when enforce_security_report_validation is disabled' do
- before do
- stub_feature_flags(enforce_security_report_validation: false)
- end
-
- let(:report_data) do
- {
- 'version' => 'V2.7.0'
- }
- end
-
- it { is_expected.to be_truthy }
- end
+ it { is_expected.to be_falsey }
end
end
@@ -178,20 +205,40 @@ RSpec.describe Gitlab::Ci::Parsers::Security::Validators::SchemaValidator do
let(:report_type) { :dast }
let(:report_version) { "12.37.0" }
- context 'when enforce_security_report_validation is enabled' do
- before do
- stub_feature_flags(enforce_security_report_validation: true)
+ context 'and the report is valid' do
+ let(:report_data) do
+ {
+ 'version' => report_version,
+ 'vulnerabilities' => []
+ }
end
- context 'when the report is valid' do
- let(:report_data) do
- {
- 'version' => report_version,
- 'vulnerabilities' => []
- }
- end
+ it { is_expected.to be_falsey }
+
+ it 'logs related information' do
+ expect(Gitlab::AppLogger).to receive(:info).with(
+ message: "security report schema validation problem",
+ security_report_type: report_type,
+ security_report_version: report_version,
+ project_id: project.id,
+ security_report_failure: 'using_unsupported_schema_version',
+ security_report_scanner_id: 'gemnasium',
+ security_report_scanner_version: '2.1.0'
+ )
- it { is_expected.to be_falsey }
+ subject
+ end
+ end
+
+ context 'and the report is invalid' do
+ let(:report_data) do
+ {
+ 'version' => report_version
+ }
+ end
+
+ context 'and scanner information is empty' do
+ let(:scanner) { {} }
it 'logs related information' do
expect(Gitlab::AppLogger).to receive(:info).with(
@@ -199,79 +246,26 @@ RSpec.describe Gitlab::Ci::Parsers::Security::Validators::SchemaValidator do
security_report_type: report_type,
security_report_version: report_version,
project_id: project.id,
+ security_report_failure: 'schema_validation_fails',
+ security_report_scanner_id: nil,
+ security_report_scanner_version: nil
+ )
+
+ expect(Gitlab::AppLogger).to receive(:info).with(
+ message: "security report schema validation problem",
+ security_report_type: report_type,
+ security_report_version: report_version,
+ project_id: project.id,
security_report_failure: 'using_unsupported_schema_version',
- security_report_scanner_id: 'gemnasium',
- security_report_scanner_version: '2.1.0'
+ security_report_scanner_id: nil,
+ security_report_scanner_version: nil
)
subject
end
end
- context 'when the report is invalid' do
- let(:report_data) do
- {
- 'version' => report_version
- }
- end
-
- context 'when scanner information is empty' do
- let(:scanner) { {} }
-
- it 'logs related information' do
- expect(Gitlab::AppLogger).to receive(:info).with(
- message: "security report schema validation problem",
- security_report_type: report_type,
- security_report_version: report_version,
- project_id: project.id,
- security_report_failure: 'schema_validation_fails',
- security_report_scanner_id: nil,
- security_report_scanner_version: nil
- )
-
- expect(Gitlab::AppLogger).to receive(:info).with(
- message: "security report schema validation problem",
- security_report_type: report_type,
- security_report_version: report_version,
- project_id: project.id,
- security_report_failure: 'using_unsupported_schema_version',
- security_report_scanner_id: nil,
- security_report_scanner_version: nil
- )
-
- subject
- end
- end
-
- it { is_expected.to be_falsey }
- end
- end
-
- context 'when enforce_security_report_validation is disabled' do
- before do
- stub_feature_flags(enforce_security_report_validation: false)
- end
-
- context 'when the report is valid' do
- let(:report_data) do
- {
- 'version' => report_version,
- 'vulnerabilities' => []
- }
- end
-
- it { is_expected.to be_truthy }
- end
-
- context 'when the report is invalid' do
- let(:report_data) do
- {
- 'version' => report_version
- }
- end
-
- it { is_expected.to be_truthy }
- end
+ it { is_expected.to be_falsey }
end
end
@@ -284,19 +278,7 @@ RSpec.describe Gitlab::Ci::Parsers::Security::Validators::SchemaValidator do
}
end
- before do
- stub_feature_flags(enforce_security_report_validation: true)
- end
-
it { is_expected.to be_falsey }
-
- context 'when enforce_security_report_validation is disabled' do
- before do
- stub_feature_flags(enforce_security_report_validation: false)
- end
-
- it { is_expected.to be_truthy }
- end
end
end
@@ -307,7 +289,7 @@ RSpec.describe Gitlab::Ci::Parsers::Security::Validators::SchemaValidator do
let(:report_type) { :dast }
let(:report_version) { described_class::SUPPORTED_VERSIONS[report_type].last }
- context 'when the report is valid' do
+ context 'and the report is valid' do
let(:report_data) do
{
'version' => report_version,
@@ -318,34 +300,20 @@ RSpec.describe Gitlab::Ci::Parsers::Security::Validators::SchemaValidator do
it { is_expected.to be_empty }
end
- context 'when the report is invalid' do
+ context 'and the report is invalid' do
let(:report_data) do
{
'version' => report_version
}
end
- context 'when enforce_security_report_validation is enabled' do
- before do
- stub_feature_flags(enforce_security_report_validation: project)
- end
-
- let(:expected_errors) do
- [
- 'root is missing required keys: vulnerabilities'
- ]
- end
-
- it { is_expected.to match_array(expected_errors) }
+ let(:expected_errors) do
+ [
+ 'root is missing required keys: vulnerabilities'
+ ]
end
- context 'when enforce_security_report_validation is disabled' do
- before do
- stub_feature_flags(enforce_security_report_validation: false)
- end
-
- it { is_expected.to be_empty }
- end
+ it { is_expected.to match_array(expected_errors) }
end
end
@@ -363,7 +331,7 @@ RSpec.describe Gitlab::Ci::Parsers::Security::Validators::SchemaValidator do
stub_const("#{described_class}::DEPRECATED_VERSIONS", deprecations_hash)
end
- context 'when the report passes schema validation' do
+ context 'and the report passes schema validation' do
let(:report_data) do
{
'version' => '10.0.0',
@@ -374,119 +342,77 @@ RSpec.describe Gitlab::Ci::Parsers::Security::Validators::SchemaValidator do
it { is_expected.to be_empty }
end
- context 'when the report does not pass schema validation' do
- context 'when enforce_security_report_validation is enabled' do
- before do
- stub_feature_flags(enforce_security_report_validation: true)
- end
-
- let(:report_data) do
- {
- 'version' => 'V2.7.0'
- }
- end
-
- let(:expected_errors) do
- [
- "property '/version' does not match pattern: ^[0-9]+\\.[0-9]+\\.[0-9]+$",
- "root is missing required keys: vulnerabilities"
- ]
- end
-
- it { is_expected.to match_array(expected_errors) }
+ context 'and the report does not pass schema validation' do
+ let(:report_data) do
+ {
+ 'version' => 'V2.7.0'
+ }
end
- context 'when enforce_security_report_validation is disabled' do
- before do
- stub_feature_flags(enforce_security_report_validation: false)
- end
-
- let(:report_data) do
- {
- 'version' => 'V2.7.0'
- }
- end
-
- it { is_expected.to be_empty }
+ let(:expected_errors) do
+ [
+ "property '/version' does not match pattern: ^[0-9]+\\.[0-9]+\\.[0-9]+$",
+ "root is missing required keys: vulnerabilities"
+ ]
end
+
+ it { is_expected.to match_array(expected_errors) }
end
end
context 'when given an unsupported schema version' do
let(:report_type) { :dast }
let(:report_version) { "12.37.0" }
+ let(:expected_unsupported_message) do
+ "Version #{report_version} for report type #{report_type} is unsupported, supported versions for this report type are: "\
+ "#{supported_dast_versions}. GitLab will attempt to validate this report against the earliest supported "\
+ "versions of this report type, to show all the errors but will not ingest the report"
+ end
- context 'when enforce_security_report_validation is enabled' do
- before do
- stub_feature_flags(enforce_security_report_validation: true)
+ context 'and the report is valid' do
+ let(:report_data) do
+ {
+ 'version' => report_version,
+ 'vulnerabilities' => []
+ }
end
- context 'when the report is valid' do
- let(:report_data) do
- {
- 'version' => report_version,
- 'vulnerabilities' => []
- }
- end
-
- let(:expected_errors) do
- [
- "Version 12.37.0 for report type dast is unsupported, supported versions for this report type are: #{supported_dast_versions}"
- ]
- end
-
- it { is_expected.to match_array(expected_errors) }
+ let(:expected_errors) do
+ [
+ expected_unsupported_message
+ ]
end
- context 'when the report is invalid' do
- let(:report_data) do
- {
- 'version' => report_version
- }
- end
-
- let(:expected_errors) do
- [
- "Version 12.37.0 for report type dast is unsupported, supported versions for this report type are: #{supported_dast_versions}",
- "root is missing required keys: vulnerabilities"
- ]
- end
-
- it { is_expected.to match_array(expected_errors) }
- end
+ it { is_expected.to match_array(expected_errors) }
end
- context 'when enforce_security_report_validation is disabled' do
- before do
- stub_feature_flags(enforce_security_report_validation: false)
+ context 'and the report is invalid' do
+ let(:report_data) do
+ {
+ 'version' => report_version
+ }
end
- context 'when the report is valid' do
- let(:report_data) do
- {
- 'version' => report_version,
- 'vulnerabilities' => []
- }
- end
-
- it { is_expected.to be_empty }
+ let(:expected_errors) do
+ [
+ expected_unsupported_message,
+ "root is missing required keys: vulnerabilities"
+ ]
end
- context 'when the report is invalid' do
- let(:report_data) do
- {
- 'version' => report_version
- }
- end
-
- it { is_expected.to be_empty }
- end
+ it { is_expected.to match_array(expected_errors) }
end
end
context 'when not given a schema version' do
let(:report_type) { :dast }
let(:report_version) { nil }
+ let(:expected_missing_version_message) do
+ "Report version not provided, #{report_type} report type supports versions: #{supported_dast_versions}. GitLab "\
+ "will attempt to validate this report against the earliest supported versions of this report type, to show all "\
+ "the errors but will not ingest the report"
+ end
+
let(:report_data) do
{
'vulnerabilities' => []
@@ -496,19 +422,11 @@ RSpec.describe Gitlab::Ci::Parsers::Security::Validators::SchemaValidator do
let(:expected_errors) do
[
"root is missing required keys: version",
- "Report version not provided, dast report type supports versions: #{supported_dast_versions}"
+ expected_missing_version_message
]
end
it { is_expected.to match_array(expected_errors) }
-
- context 'when enforce_security_report_validation is disabled' do
- before do
- stub_feature_flags(enforce_security_report_validation: false)
- end
-
- it { is_expected.to be_empty }
- end
end
end
@@ -519,7 +437,7 @@ RSpec.describe Gitlab::Ci::Parsers::Security::Validators::SchemaValidator do
let(:report_type) { :dast }
let(:report_version) { described_class::SUPPORTED_VERSIONS[report_type].last }
- context 'when the report is valid' do
+ context 'and the report is valid' do
let(:report_data) do
{
'version' => report_version,
@@ -530,7 +448,7 @@ RSpec.describe Gitlab::Ci::Parsers::Security::Validators::SchemaValidator do
it { is_expected.to be_empty }
end
- context 'when the report is invalid' do
+ context 'and the report is invalid' do
let(:report_data) do
{
'version' => report_version
@@ -550,9 +468,14 @@ RSpec.describe Gitlab::Ci::Parsers::Security::Validators::SchemaValidator do
end
let(:report_version) { described_class::DEPRECATED_VERSIONS[report_type].last }
+ let(:expected_deprecation_message) do
+ "Version #{report_version} for report type #{report_type} has been deprecated, supported versions for this "\
+ "report type are: #{supported_dast_versions}. GitLab will attempt to parse and ingest this report if valid."
+ end
+
let(:expected_deprecation_warnings) do
[
- "Version V2.7.0 for report type dast has been deprecated, supported versions for this report type are: #{supported_dast_versions}"
+ expected_deprecation_message
]
end
@@ -560,7 +483,7 @@ RSpec.describe Gitlab::Ci::Parsers::Security::Validators::SchemaValidator do
stub_const("#{described_class}::DEPRECATED_VERSIONS", deprecations_hash)
end
- context 'when the report passes schema validation' do
+ context 'and the report passes schema validation' do
let(:report_data) do
{
'version' => report_version,
@@ -571,7 +494,7 @@ RSpec.describe Gitlab::Ci::Parsers::Security::Validators::SchemaValidator do
it { is_expected.to match_array(expected_deprecation_warnings) }
end
- context 'when the report does not pass schema validation' do
+ context 'and the report does not pass schema validation' do
let(:report_data) do
{
'version' => 'V2.7.0'
@@ -600,11 +523,27 @@ RSpec.describe Gitlab::Ci::Parsers::Security::Validators::SchemaValidator do
describe '#warnings' do
subject { validator.warnings }
- context 'when given a supported schema version' do
+ context 'when given a supported MAJOR.MINOR schema version' do
let(:report_type) { :dast }
- let(:report_version) { described_class::SUPPORTED_VERSIONS[report_type].last }
+ let(:report_version) do
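+ # As in #valid?: latest vendored MAJOR.MINOR with an unvendored PATCH ("34")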
+ latest_vendored_version = described_class::SUPPORTED_VERSIONS[report_type].last.split(".")
+ (latest_vendored_version[0...2] << "34").join(".")
+ end
+
+ let(:latest_patch_version) do
+ ::Security::ReportSchemaVersionMatcher.new(
+ report_declared_version: report_version,
+ supported_versions: described_class::SUPPORTED_VERSIONS[report_type]
+ ).call
+ end
+
+ let(:message) do
+ "This report uses a supported MAJOR.MINOR schema version but the PATCH version doesn't match"\
+ " any vendored schema version. Validation will be attempted against version"\
+ " #{latest_patch_version}"
+ end
- context 'when the report is valid' do
+ context 'and the report is valid' do
let(:report_data) do
{
'version' => report_version,
@@ -612,37 +551,57 @@ RSpec.describe Gitlab::Ci::Parsers::Security::Validators::SchemaValidator do
}
end
- it { is_expected.to be_empty }
+ it { is_expected.to match_array([message]) }
end
- context 'when the report is invalid' do
+ context 'and the report is invalid' do
let(:report_data) do
{
'version' => report_version
}
end
- context 'when enforce_security_report_validation is enabled' do
- before do
- stub_feature_flags(enforce_security_report_validation: project)
- end
+ it { is_expected.to match_array([message]) }
+
+ it 'logs related information' do
+ expect(Gitlab::AppLogger).to receive(:info).with(
+ message: "security report schema validation problem",
+ security_report_type: report_type,
+ security_report_version: report_version,
+ project_id: project.id,
+ security_report_failure: 'schema_validation_fails',
+ security_report_scanner_id: 'gemnasium',
+ security_report_scanner_version: '2.1.0'
+ )
- it { is_expected.to be_empty }
+ subject
end
+ end
+ end
- context 'when enforce_security_report_validation is disabled' do
- before do
- stub_feature_flags(enforce_security_report_validation: false)
- end
+ context 'when given a supported schema version' do
+ let(:report_type) { :dast }
+ let(:report_version) { described_class::SUPPORTED_VERSIONS[report_type].last }
- let(:expected_warnings) do
- [
- 'root is missing required keys: vulnerabilities'
- ]
- end
+ context 'and the report is valid' do
+ let(:report_data) do
+ {
+ 'version' => report_version,
+ 'vulnerabilities' => []
+ }
+ end
+
+ it { is_expected.to be_empty }
+ end
- it { is_expected.to match_array(expected_warnings) }
+ context 'and the report is invalid' do
+ let(:report_data) do
+ {
+ 'version' => report_version
+ }
end
+
+ it { is_expected.to be_empty }
end
end
@@ -660,7 +619,7 @@ RSpec.describe Gitlab::Ci::Parsers::Security::Validators::SchemaValidator do
stub_const("#{described_class}::DEPRECATED_VERSIONS", deprecations_hash)
end
- context 'when the report passes schema validation' do
+ context 'and the report passes schema validation' do
let(:report_data) do
{
'vulnerabilities' => []
@@ -670,35 +629,14 @@ RSpec.describe Gitlab::Ci::Parsers::Security::Validators::SchemaValidator do
it { is_expected.to be_empty }
end
- context 'when the report does not pass schema validation' do
+ context 'and the report does not pass schema validation' do
let(:report_data) do
{
'version' => 'V2.7.0'
}
end
- context 'when enforce_security_report_validation is enabled' do
- before do
- stub_feature_flags(enforce_security_report_validation: true)
- end
-
- it { is_expected.to be_empty }
- end
-
- context 'when enforce_security_report_validation is disabled' do
- before do
- stub_feature_flags(enforce_security_report_validation: false)
- end
-
- let(:expected_warnings) do
- [
- "property '/version' does not match pattern: ^[0-9]+\\.[0-9]+\\.[0-9]+$",
- "root is missing required keys: vulnerabilities"
- ]
- end
-
- it { is_expected.to match_array(expected_warnings) }
- end
+ it { is_expected.to be_empty }
end
end
@@ -706,71 +644,25 @@ RSpec.describe Gitlab::Ci::Parsers::Security::Validators::SchemaValidator do
let(:report_type) { :dast }
let(:report_version) { "12.37.0" }
- context 'when enforce_security_report_validation is enabled' do
- before do
- stub_feature_flags(enforce_security_report_validation: true)
- end
-
- context 'when the report is valid' do
- let(:report_data) do
- {
- 'version' => report_version,
- 'vulnerabilities' => []
- }
- end
-
- it { is_expected.to be_empty }
+ context 'and the report is valid' do
+ let(:report_data) do
+ {
+ 'version' => report_version,
+ 'vulnerabilities' => []
+ }
end
- context 'when the report is invalid' do
- let(:report_data) do
- {
- 'version' => report_version
- }
- end
-
- it { is_expected.to be_empty }
- end
+ it { is_expected.to be_empty }
end
- context 'when enforce_security_report_validation is disabled' do
- before do
- stub_feature_flags(enforce_security_report_validation: false)
- end
-
- context 'when the report is valid' do
- let(:report_data) do
- {
- 'version' => report_version,
- 'vulnerabilities' => []
- }
- end
-
- let(:expected_warnings) do
- [
- "Version 12.37.0 for report type dast is unsupported, supported versions for this report type are: #{supported_dast_versions}"
- ]
- end
-
- it { is_expected.to match_array(expected_warnings) }
+ context 'and the report is invalid' do
+ let(:report_data) do
+ {
+ 'version' => report_version
+ }
end
- context 'when the report is invalid' do
- let(:report_data) do
- {
- 'version' => report_version
- }
- end
-
- let(:expected_warnings) do
- [
- "Version 12.37.0 for report type dast is unsupported, supported versions for this report type are: #{supported_dast_versions}",
- "root is missing required keys: vulnerabilities"
- ]
- end
-
- it { is_expected.to match_array(expected_warnings) }
- end
+ it { is_expected.to be_empty }
end
end
@@ -784,21 +676,6 @@ RSpec.describe Gitlab::Ci::Parsers::Security::Validators::SchemaValidator do
end
it { is_expected.to be_empty }
-
- context 'when enforce_security_report_validation is disabled' do
- before do
- stub_feature_flags(enforce_security_report_validation: false)
- end
-
- let(:expected_warnings) do
- [
- "root is missing required keys: version",
- "Report version not provided, dast report type supports versions: #{supported_dast_versions}"
- ]
- end
-
- it { is_expected.to match_array(expected_warnings) }
- end
end
end
end
diff --git a/spec/lib/gitlab/ci/pipeline/chain/command_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/command_spec.rb
index 0d78ce3440a..de43e759193 100644
--- a/spec/lib/gitlab/ci/pipeline/chain/command_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/chain/command_spec.rb
@@ -282,7 +282,7 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::Command do
subject { command.ambiguous_ref? }
context 'when ref is not ambiguous' do
- it { is_expected. to eq(false) }
+ it { is_expected.to eq(false) }
end
context 'when ref is ambiguous' do
@@ -291,7 +291,7 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::Command do
project.repository.add_branch(project.creator, 'ref', 'master')
end
- it { is_expected. to eq(true) }
+ it { is_expected.to eq(true) }
end
end
diff --git a/spec/lib/gitlab/ci/pipeline/chain/create_deployments_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/create_deployments_spec.rb
index cbf92f8fa83..be5d3a96126 100644
--- a/spec/lib/gitlab/ci/pipeline/chain/create_deployments_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/chain/create_deployments_spec.rb
@@ -39,7 +39,7 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::CreateDeployments do
end
context 'when the corresponding environment does not exist' do
- let!(:environment) { }
+ let!(:environment) {}
it 'does not create a deployment record' do
expect { subject }.not_to change { Deployment.count }
diff --git a/spec/lib/gitlab/ci/pipeline/chain/evaluate_workflow_rules_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/evaluate_workflow_rules_spec.rb
index e30a78546af..eb5a37f19f4 100644
--- a/spec/lib/gitlab/ci/pipeline/chain/evaluate_workflow_rules_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/chain/evaluate_workflow_rules_spec.rb
@@ -45,7 +45,7 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::EvaluateWorkflowRules do
before do
allow(step).to receive(:workflow_rules_result)
.and_return(
- double(pass?: true, variables: { 'VAR1' => 'val2' })
+ double(pass?: true, variables: { 'VAR1' => 'val2', 'VAR2' => 3 })
)
step.perform!
@@ -65,7 +65,7 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::EvaluateWorkflowRules do
end
it 'saves workflow_rules_result' do
- expect(command.workflow_rules_result.variables).to eq({ 'VAR1' => 'val2' })
+ expect(command.workflow_rules_result.variables).to eq({ 'VAR1' => 'val2', 'VAR2' => 3 })
end
end
end
diff --git a/spec/lib/gitlab/ci/pipeline/chain/seed_block_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/seed_block_spec.rb
index fabfbd779f3..5ee96b0baa8 100644
--- a/spec/lib/gitlab/ci/pipeline/chain/seed_block_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/chain/seed_block_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe Gitlab::Ci::Pipeline::Chain::SeedBlock do
let(:project) { create(:project, :repository) }
let(:user) { create(:user, developer_projects: [project]) }
- let(:seeds_block) { }
+ let(:seeds_block) {}
let(:command) do
Gitlab::Ci::Pipeline::Chain::Command.new(
diff --git a/spec/lib/gitlab/ci/pipeline/chain/seed_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/seed_spec.rb
index 687bb82a8ef..f7774e199fb 100644
--- a/spec/lib/gitlab/ci/pipeline/chain/seed_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/chain/seed_spec.rb
@@ -6,7 +6,7 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::Seed do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:user) { create(:user, developer_projects: [project]) }
- let(:seeds_block) { }
+ let(:seeds_block) {}
let(:command) { initialize_command }
let(:pipeline) { build(:ci_pipeline, project: project) }
@@ -205,6 +205,30 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::Seed do
end
end
+ describe '#rule_variables' do
+ let(:config) do
+ {
+ variables: { VAR1: 11 },
+ workflow: {
+ rules: [{ if: '$CI_PIPELINE_SOURCE',
+ variables: { SYMBOL: :symbol, STRING: "string", INTEGER: 1 } },
+ { when: 'always' }]
+ },
+ rspec: { script: 'rake' }
+ }
+ end
+
+ let(:rspec_variables) { command.pipeline_seed.stages[0].statuses[0].variables.to_hash }
+
+ it 'correctly parses rule variables' do
+ run_chain
+
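+ # Rule variable values that are not strings (symbols, integers) are coerced to strings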
+ expect(rspec_variables['SYMBOL']).to eq("symbol")
+ expect(rspec_variables['STRING']).to eq("string")
+ expect(rspec_variables['INTEGER']).to eq("1")
+ end
+ end
+
context 'N+1 queries' do
it 'avoids N+1 queries when calculating variables of jobs', :use_sql_query_cache do
warm_up_pipeline, warm_up_command = prepare_pipeline1
diff --git a/spec/lib/gitlab/ci/pipeline/chain/validate/external_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/validate/external_spec.rb
index eeac0c85a77..fb1a360a4b7 100644
--- a/spec/lib/gitlab/ci/pipeline/chain/validate/external_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/chain/validate/external_spec.rb
@@ -148,6 +148,8 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::Validate::External do
expect(::Gitlab::HTTP).to receive(:post) do |_url, params|
payload = Gitlab::Json.parse(params[:body])
+ expect(payload['total_builds_count']).to eq(0)
+
builds = payload['builds']
expect(builds.count).to eq(2)
expect(builds[0]['services']).to be_nil
@@ -160,6 +162,23 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::Validate::External do
perform!
end
+
+ context "with existing jobs from other project's alive pipelines" do
+ before do
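+ # One alive pipeline with a job belongs to the same user; the other belongs to a different user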
+ create(:ci_pipeline, :with_job, user: user)
+ create(:ci_pipeline, :with_job)
+ end
+
+ it 'returns the expected total_builds_count' do
+ expect(::Gitlab::HTTP).to receive(:post) do |_url, params|
+ payload = Gitlab::Json.parse(params[:body])
+
+ expect(payload['total_builds_count']).to eq(1)
+ end
+
+ perform!
+ end
+ end
end
context 'when EXTERNAL_VALIDATION_SERVICE_TOKEN is set' do
@@ -243,7 +262,7 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::Validate::External do
end
context 'when save_incompleted is false' do
- let(:save_incompleted) { false}
+ let(:save_incompleted) { false }
it 'adds errors to the pipeline without dropping it' do
perform!
diff --git a/spec/lib/gitlab/ci/pipeline/expression/lexeme/matches_spec.rb b/spec/lib/gitlab/ci/pipeline/expression/lexeme/matches_spec.rb
index 83742699d3d..47f172922a5 100644
--- a/spec/lib/gitlab/ci/pipeline/expression/lexeme/matches_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/expression/lexeme/matches_spec.rb
@@ -160,14 +160,6 @@ RSpec.describe Gitlab::Ci::Pipeline::Expression::Lexeme::Matches do
let(:left_value) { 'abcde' }
it { is_expected.to eq(true) }
-
- context 'when the FF ci_fix_rules_if_comparison_with_regexp_variable is disabled' do
- before do
- stub_feature_flags(ci_fix_rules_if_comparison_with_regexp_variable: false)
- end
-
- it { is_expected.to eq(false) }
- end
end
context 'when not matching' do
diff --git a/spec/lib/gitlab/ci/pipeline/expression/lexeme/not_matches_spec.rb b/spec/lib/gitlab/ci/pipeline/expression/lexeme/not_matches_spec.rb
index aad33106647..9e7ea3e4ea4 100644
--- a/spec/lib/gitlab/ci/pipeline/expression/lexeme/not_matches_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/expression/lexeme/not_matches_spec.rb
@@ -160,14 +160,6 @@ RSpec.describe Gitlab::Ci::Pipeline::Expression::Lexeme::NotMatches do
let(:left_value) { 'abcde' }
it { is_expected.to eq(false) }
-
- context 'when the FF ci_fix_rules_if_comparison_with_regexp_variable is disabled' do
- before do
- stub_feature_flags(ci_fix_rules_if_comparison_with_regexp_variable: false)
- end
-
- it { is_expected.to eq(true) }
- end
end
context 'when not matching' do
diff --git a/spec/lib/gitlab/ci/pipeline/expression/statement_spec.rb b/spec/lib/gitlab/ci/pipeline/expression/statement_spec.rb
index bbd11a00149..acaec07f95b 100644
--- a/spec/lib/gitlab/ci/pipeline/expression/statement_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/expression/statement_spec.rb
@@ -179,24 +179,16 @@ RSpec.describe Gitlab::Ci::Pipeline::Expression::Statement do
.to_hash
end
- where(:expression, :ff, :result) do
- '$teststring =~ "abcde"' | true | true
- '$teststring =~ "abcde"' | false | true
- '$teststring =~ $teststring' | true | true
- '$teststring =~ $teststring' | false | true
- '$teststring =~ $pattern1' | true | true
- '$teststring =~ $pattern1' | false | false
- '$teststring =~ $pattern2' | true | false
- '$teststring =~ $pattern2' | false | false
+ where(:expression, :result) do
+ '$teststring =~ "abcde"' | true
+ '$teststring =~ $teststring' | true
+ '$teststring =~ $pattern1' | true
+ '$teststring =~ $pattern2' | false
end
with_them do
let(:text) { expression }
- before do
- stub_feature_flags(ci_fix_rules_if_comparison_with_regexp_variable: ff)
- end
-
it { is_expected.to eq(result) }
end
end
diff --git a/spec/lib/gitlab/ci/pipeline/quota/deployments_spec.rb b/spec/lib/gitlab/ci/pipeline/quota/deployments_spec.rb
index 8f727749ee2..a742c619584 100644
--- a/spec/lib/gitlab/ci/pipeline/quota/deployments_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/quota/deployments_spec.rb
@@ -9,7 +9,7 @@ RSpec.describe Gitlab::Ci::Pipeline::Quota::Deployments do
let(:pipeline) { build_stubbed(:ci_pipeline, project: project) }
- let(:pipeline_seed) { double(:pipeline_seed, deployments_count: 2)}
+ let(:pipeline_seed) { double(:pipeline_seed, deployments_count: 2) }
let(:command) do
double(:command,
diff --git a/spec/lib/gitlab/ci/pipeline/seed/build_spec.rb b/spec/lib/gitlab/ci/pipeline/seed/build_spec.rb
index 040f3ab5830..75f6a773c2d 100644
--- a/spec/lib/gitlab/ci/pipeline/seed/build_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/seed/build_spec.rb
@@ -97,15 +97,15 @@ RSpec.describe Gitlab::Ci::Pipeline::Seed::Build do
let(:attributes) do
{ name: 'rspec',
ref: 'master',
- job_variables: [{ key: 'VAR1', value: 'var 1', public: true },
- { key: 'VAR2', value: 'var 2', public: true }],
+ job_variables: [{ key: 'VAR1', value: 'var 1' },
+ { key: 'VAR2', value: 'var 2' }],
rules: [{ if: '$VAR == null', variables: { VAR1: 'new var 1', VAR3: 'var 3' } }] }
end
it do
- is_expected.to include(yaml_variables: [{ key: 'VAR1', value: 'new var 1', public: true },
- { key: 'VAR2', value: 'var 2', public: true },
- { key: 'VAR3', value: 'var 3', public: true }])
+ is_expected.to include(yaml_variables: [{ key: 'VAR1', value: 'new var 1' },
+ { key: 'VAR3', value: 'var 3' },
+ { key: 'VAR2', value: 'var 2' }])
end
end
@@ -114,13 +114,13 @@ RSpec.describe Gitlab::Ci::Pipeline::Seed::Build do
{
name: 'rspec',
ref: 'master',
- job_variables: [{ key: 'VARIABLE', value: 'value', public: true }],
+ job_variables: [{ key: 'VARIABLE', value: 'value' }],
tag_list: ['static-tag', '$VARIABLE', '$NO_VARIABLE']
}
end
it { is_expected.to include(tag_list: ['static-tag', 'value', '$NO_VARIABLE']) }
- it { is_expected.to include(yaml_variables: [{ key: 'VARIABLE', value: 'value', public: true }]) }
+ it { is_expected.to include(yaml_variables: [{ key: 'VARIABLE', value: 'value' }]) }
end
context 'with cache:key' do
@@ -257,19 +257,19 @@ RSpec.describe Gitlab::Ci::Pipeline::Seed::Build do
let(:attributes) do
{ name: 'rspec',
ref: 'master',
- yaml_variables: [{ key: 'VAR2', value: 'var 2', public: true },
- { key: 'VAR3', value: 'var 3', public: true }],
- job_variables: [{ key: 'VAR2', value: 'var 2', public: true },
- { key: 'VAR3', value: 'var 3', public: true }],
+ yaml_variables: [{ key: 'VAR2', value: 'var 2' },
+ { key: 'VAR3', value: 'var 3' }],
+ job_variables: [{ key: 'VAR2', value: 'var 2' },
+ { key: 'VAR3', value: 'var 3' }],
root_variables_inheritance: root_variables_inheritance }
end
context 'when the pipeline has variables' do
let(:root_variables) do
- [{ key: 'VAR1', value: 'var overridden pipeline 1', public: true },
- { key: 'VAR2', value: 'var pipeline 2', public: true },
- { key: 'VAR3', value: 'var pipeline 3', public: true },
- { key: 'VAR4', value: 'new var pipeline 4', public: true }]
+ [{ key: 'VAR1', value: 'var overridden pipeline 1' },
+ { key: 'VAR2', value: 'var pipeline 2' },
+ { key: 'VAR3', value: 'var pipeline 3' },
+ { key: 'VAR4', value: 'new var pipeline 4' }]
end
context 'when root_variables_inheritance is true' do
@@ -277,10 +277,10 @@ RSpec.describe Gitlab::Ci::Pipeline::Seed::Build do
it 'returns calculated yaml variables' do
expect(subject[:yaml_variables]).to match_array(
- [{ key: 'VAR1', value: 'var overridden pipeline 1', public: true },
- { key: 'VAR2', value: 'var 2', public: true },
- { key: 'VAR3', value: 'var 3', public: true },
- { key: 'VAR4', value: 'new var pipeline 4', public: true }]
+ [{ key: 'VAR1', value: 'var overridden pipeline 1' },
+ { key: 'VAR2', value: 'var 2' },
+ { key: 'VAR3', value: 'var 3' },
+ { key: 'VAR4', value: 'new var pipeline 4' }]
)
end
end
@@ -290,8 +290,8 @@ RSpec.describe Gitlab::Ci::Pipeline::Seed::Build do
it 'returns job variables' do
expect(subject[:yaml_variables]).to match_array(
- [{ key: 'VAR2', value: 'var 2', public: true },
- { key: 'VAR3', value: 'var 3', public: true }]
+ [{ key: 'VAR2', value: 'var 2' },
+ { key: 'VAR3', value: 'var 3' }]
)
end
end
@@ -301,9 +301,9 @@ RSpec.describe Gitlab::Ci::Pipeline::Seed::Build do
it 'returns calculated yaml variables' do
expect(subject[:yaml_variables]).to match_array(
- [{ key: 'VAR1', value: 'var overridden pipeline 1', public: true },
- { key: 'VAR2', value: 'var 2', public: true },
- { key: 'VAR3', value: 'var 3', public: true }]
+ [{ key: 'VAR1', value: 'var overridden pipeline 1' },
+ { key: 'VAR2', value: 'var 2' },
+ { key: 'VAR3', value: 'var 3' }]
)
end
end
@@ -314,8 +314,8 @@ RSpec.describe Gitlab::Ci::Pipeline::Seed::Build do
it 'returns seed yaml variables' do
expect(subject[:yaml_variables]).to match_array(
- [{ key: 'VAR2', value: 'var 2', public: true },
- { key: 'VAR3', value: 'var 3', public: true }])
+ [{ key: 'VAR2', value: 'var 2' },
+ { key: 'VAR3', value: 'var 3' }])
end
end
end
@@ -324,8 +324,8 @@ RSpec.describe Gitlab::Ci::Pipeline::Seed::Build do
let(:attributes) do
{ name: 'rspec',
ref: 'master',
- yaml_variables: [{ key: 'VAR1', value: 'var 1', public: true }],
- job_variables: [{ key: 'VAR1', value: 'var 1', public: true }],
+ yaml_variables: [{ key: 'VAR1', value: 'var 1' }],
+ job_variables: [{ key: 'VAR1', value: 'var 1' }],
root_variables_inheritance: root_variables_inheritance,
rules: rules }
end
@@ -338,14 +338,14 @@ RSpec.describe Gitlab::Ci::Pipeline::Seed::Build do
end
it 'recalculates the variables' do
- expect(subject[:yaml_variables]).to contain_exactly({ key: 'VAR1', value: 'overridden var 1', public: true },
- { key: 'VAR2', value: 'new var 2', public: true })
+ expect(subject[:yaml_variables]).to contain_exactly({ key: 'VAR1', value: 'overridden var 1' },
+ { key: 'VAR2', value: 'new var 2' })
end
end
context 'when the rules use root variables' do
let(:root_variables) do
- [{ key: 'VAR2', value: 'var pipeline 2', public: true }]
+ [{ key: 'VAR2', value: 'var pipeline 2' }]
end
let(:rules) do
@@ -353,15 +353,15 @@ RSpec.describe Gitlab::Ci::Pipeline::Seed::Build do
end
it 'recalculates the variables' do
- expect(subject[:yaml_variables]).to contain_exactly({ key: 'VAR1', value: 'overridden var 1', public: true },
- { key: 'VAR2', value: 'overridden var 2', public: true })
+ expect(subject[:yaml_variables]).to contain_exactly({ key: 'VAR1', value: 'overridden var 1' },
+ { key: 'VAR2', value: 'overridden var 2' })
end
context 'when the root_variables_inheritance is false' do
let(:root_variables_inheritance) { false }
it 'does not recalculate the variables' do
- expect(subject[:yaml_variables]).to contain_exactly({ key: 'VAR1', value: 'var 1', public: true })
+ expect(subject[:yaml_variables]).to contain_exactly({ key: 'VAR1', value: 'var 1' })
end
end
end
@@ -769,7 +769,7 @@ RSpec.describe Gitlab::Ci::Pipeline::Seed::Build do
with_them do
it { is_expected.not_to be_included }
- it 'correctly populates when:' do
+ it 'still correctly populates when:' do
expect(seed_build.attributes).to include(when: 'never')
end
end
@@ -958,6 +958,26 @@ RSpec.describe Gitlab::Ci::Pipeline::Seed::Build do
expect(seed_build.attributes).to include(when: 'never')
end
end
+
+ context 'with invalid rules raising error' do
+ let(:rule_set) do
+ [
+ { changes: { paths: ['README.md'], compare_to: 'invalid-ref' }, when: 'never' }
+ ]
+ end
+
+ it { is_expected.not_to be_included }
+
+ it 'correctly populates when:' do
+ expect(seed_build.attributes).to include(when: 'never')
+ end
+
+ it 'returns an error' do
+ expect(seed_build.errors).to contain_exactly(
+ 'Failed to parse rule for rspec: rules:changes:compare_to is not a valid ref'
+ )
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/ci/reports/sbom/component_spec.rb b/spec/lib/gitlab/ci/reports/sbom/component_spec.rb
new file mode 100644
index 00000000000..672117c311f
--- /dev/null
+++ b/spec/lib/gitlab/ci/reports/sbom/component_spec.rb
@@ -0,0 +1,23 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Ci::Reports::Sbom::Component do
+ let(:attributes) do
+ {
+ 'type' => 'library',
+ 'name' => 'component-name',
+ 'version' => 'v0.0.1'
+ }
+ end
+
+ subject { described_class.new(attributes) }
+
+ it 'has correct attributes' do
+ expect(subject).to have_attributes(
+ component_type: 'library',
+ name: 'component-name',
+ version: 'v0.0.1'
+ )
+ end
+end
diff --git a/spec/lib/gitlab/ci/reports/sbom/report_spec.rb b/spec/lib/gitlab/ci/reports/sbom/report_spec.rb
new file mode 100644
index 00000000000..d7a285ab13c
--- /dev/null
+++ b/spec/lib/gitlab/ci/reports/sbom/report_spec.rb
@@ -0,0 +1,54 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Ci::Reports::Sbom::Report do
+ subject(:report) { described_class.new }
+
+ describe '#add_error' do
+ it 'appends errors to a list' do
+ report.add_error('error1')
+ report.add_error('error2')
+
+ expect(report.errors).to match_array(%w[error1 error2])
+ end
+ end
+
+ describe '#set_source' do
+ let_it_be(:source) do
+ {
+ 'type' => :dependency_scanning,
+ 'data' => {
+ 'input_file' => { 'path' => 'package-lock.json' },
+ 'source_file' => { 'path' => 'package.json' },
+ 'package_manager' => { 'name' => 'npm' },
+ 'language' => { 'name' => 'JavaScript' }
+ },
+ 'fingerprint' => 'c01df1dc736c1148717e053edbde56cb3a55d3e31f87cea955945b6f67c17d42'
+ }
+ end
+
+ it 'stores the source' do
+ report.set_source(source)
+
+ expect(report.source).to be_a(Gitlab::Ci::Reports::Sbom::Source)
+ end
+ end
+
+ describe '#add_component' do
+ let_it_be(:components) do
+ [
+ { 'type' => 'library', 'name' => 'component1', 'version' => 'v0.0.1' },
+ { 'type' => 'library', 'name' => 'component2', 'version' => 'v0.0.2' },
+ { 'type' => 'library', 'name' => 'component2' }
+ ]
+ end
+
+ it 'appends components to a list' do
+ components.each { |component| report.add_component(component) }
+
+ expect(report.components.size).to eq(3)
+ expect(report.components).to all(be_a(Gitlab::Ci::Reports::Sbom::Component))
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/reports/sbom/reports_spec.rb b/spec/lib/gitlab/ci/reports/sbom/reports_spec.rb
new file mode 100644
index 00000000000..97d8d7abb33
--- /dev/null
+++ b/spec/lib/gitlab/ci/reports/sbom/reports_spec.rb
@@ -0,0 +1,21 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Ci::Reports::Sbom::Reports do
+ subject(:reports_list) { described_class.new }
+
+ describe '#add_report' do
+ let(:rep1) { Gitlab::Ci::Reports::Sbom::Report.new }
+ let(:rep2) { Gitlab::Ci::Reports::Sbom::Report.new }
+
+ it 'appends the report to the report list' do
+ reports_list.add_report(rep1)
+ reports_list.add_report(rep2)
+
+ expect(reports_list.reports.length).to eq(2)
+ expect(reports_list.reports.first).to eq(rep1)
+ expect(reports_list.reports.last).to eq(rep2)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/reports/sbom/source_spec.rb b/spec/lib/gitlab/ci/reports/sbom/source_spec.rb
new file mode 100644
index 00000000000..2d6434534a0
--- /dev/null
+++ b/spec/lib/gitlab/ci/reports/sbom/source_spec.rb
@@ -0,0 +1,29 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Ci::Reports::Sbom::Source do
+ let(:attributes) do
+ {
+ 'type' => :dependency_scanning,
+ 'data' => {
+ 'category' => 'development',
+ 'input_file' => { 'path' => 'package-lock.json' },
+ 'source_file' => { 'path' => 'package.json' },
+ 'package_manager' => { 'name' => 'npm' },
+ 'language' => { 'name' => 'JavaScript' }
+ },
+ 'fingerprint' => '4dbcb747e6f0fb3ed4f48d96b777f1d64acdf43e459fdfefad404e55c004a188'
+ }
+ end
+
+ subject { described_class.new(attributes) }
+
+ it 'has correct attributes' do
+ expect(subject).to have_attributes(
+ source_type: attributes['type'],
+ data: attributes['data'],
+ fingerprint: attributes['fingerprint']
+ )
+ end
+end
diff --git a/spec/lib/gitlab/ci/reports/security/reports_spec.rb b/spec/lib/gitlab/ci/reports/security/reports_spec.rb
index 79eee642552..e240edc4a12 100644
--- a/spec/lib/gitlab/ci/reports/security/reports_spec.rb
+++ b/spec/lib/gitlab/ci/reports/security/reports_spec.rb
@@ -57,7 +57,7 @@ RSpec.describe Gitlab::Ci::Reports::Security::Reports do
let(:high_severity_dast) { build(:ci_reports_security_finding, severity: 'high', report_type: 'dast') }
let(:vulnerabilities_allowed) { 0 }
let(:severity_levels) { %w(critical high) }
- let(:vulnerability_states) { %w(newly_detected)}
+ let(:vulnerability_states) { %w(newly_detected) }
subject { security_reports.violates_default_policy_against?(target_reports, vulnerabilities_allowed, severity_levels, vulnerability_states) }
diff --git a/spec/lib/gitlab/ci/reports/security/vulnerability_reports_comparer_spec.rb b/spec/lib/gitlab/ci/reports/security/vulnerability_reports_comparer_spec.rb
index 44e66fd9028..6f75e2c55e8 100644
--- a/spec/lib/gitlab/ci/reports/security/vulnerability_reports_comparer_spec.rb
+++ b/spec/lib/gitlab/ci/reports/security/vulnerability_reports_comparer_spec.rb
@@ -60,7 +60,7 @@ RSpec.describe Gitlab::Ci::Reports::Security::VulnerabilityReportsComparer do
end
describe '#added' do
- let(:new_location) {build(:ci_reports_security_locations_sast, :dynamic) }
+ let(:new_location) { build(:ci_reports_security_locations_sast, :dynamic) }
let(:vul_params) { vuln_params(project.id, [identifier], confidence: :high) }
let(:vuln) { build(:ci_reports_security_finding, severity: Enums::Vulnerability.severity_levels[:critical], location: new_location, **vul_params) }
let(:low_vuln) { build(:ci_reports_security_finding, severity: Enums::Vulnerability.severity_levels[:low], location: new_location, **vul_params) }
diff --git a/spec/lib/gitlab/ci/reports/test_suite_spec.rb b/spec/lib/gitlab/ci/reports/test_suite_spec.rb
index 1d6b39a7831..4a1f77bed65 100644
--- a/spec/lib/gitlab/ci/reports/test_suite_spec.rb
+++ b/spec/lib/gitlab/ci/reports/test_suite_spec.rb
@@ -91,7 +91,7 @@ RSpec.describe Gitlab::Ci::Reports::TestSuite do
subject { test_suite.with_attachment! }
context 'when test cases do not contain an attachment' do
- let(:test_case) { build(:report_test_case, :failed)}
+ let(:test_case) { build(:report_test_case, :failed) }
before do
test_suite.add_test_case(test_case)
@@ -103,7 +103,7 @@ RSpec.describe Gitlab::Ci::Reports::TestSuite do
end
context 'when test cases contain an attachment' do
- let(:test_case_with_attachment) { build(:report_test_case, :failed_with_attachment)}
+ let(:test_case_with_attachment) { build(:report_test_case, :failed_with_attachment) }
before do
test_suite.add_test_case(test_case_with_attachment)
diff --git a/spec/lib/gitlab/ci/runner_releases_spec.rb b/spec/lib/gitlab/ci/runner_releases_spec.rb
index 576eb02ad83..ad1e9b12b8a 100644
--- a/spec/lib/gitlab/ci/runner_releases_spec.rb
+++ b/spec/lib/gitlab/ci/runner_releases_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe Gitlab::Ci::RunnerReleases do
subject { described_class.instance }
- let(:runner_releases_url) { 'the release API URL' }
+ let(:runner_releases_url) { 'http://testurl.com/runner_public_releases' }
def releases
subject.releases
@@ -18,7 +18,7 @@ RSpec.describe Gitlab::Ci::RunnerReleases do
before do
subject.reset_backoff!
- stub_application_setting(public_runner_releases_url: runner_releases_url)
+ allow(subject).to receive(:runner_releases_url).and_return(runner_releases_url)
end
describe 'caching behavior', :use_clean_rails_memory_store_caching do
@@ -77,7 +77,8 @@ RSpec.describe Gitlab::Ci::RunnerReleases do
allow(Gitlab::HTTP).to receive(:get).with(runner_releases_url, anything) do
http_call_timestamp_offsets << Time.now.utc - start_time
- raise Net::OpenTimeout if opts&.dig(:raise_timeout)
+ err_class = opts&.dig(:raise_error)
+ raise err_class if err_class
mock_http_response(response)
end
@@ -113,12 +114,13 @@ RSpec.describe Gitlab::Ci::RunnerReleases do
end
context 'when request results in timeout' do
- let(:response) { }
+ let(:response) {}
let(:expected_releases) { nil }
let(:expected_releases_by_minor) { nil }
it_behaves_like 'requests that follow cache status', 5.seconds
- it_behaves_like 'a service implementing exponential backoff', raise_timeout: true
+ it_behaves_like 'a service implementing exponential backoff', raise_error: Net::OpenTimeout
+ it_behaves_like 'a service implementing exponential backoff', raise_error: Errno::ETIMEDOUT
end
context 'when response is nil' do
diff --git a/spec/lib/gitlab/ci/runner_upgrade_check_spec.rb b/spec/lib/gitlab/ci/runner_upgrade_check_spec.rb
index f2507a24b10..55c3834bfa7 100644
--- a/spec/lib/gitlab/ci/runner_upgrade_check_spec.rb
+++ b/spec/lib/gitlab/ci/runner_upgrade_check_spec.rb
@@ -5,36 +5,35 @@ require 'spec_helper'
RSpec.describe Gitlab::Ci::RunnerUpgradeCheck do
using RSpec::Parameterized::TableSyntax
- describe '#check_runner_upgrade_status' do
- subject(:result) { described_class.instance.check_runner_upgrade_status(runner_version) }
+ subject(:instance) { described_class.new(gitlab_version, runner_releases) }
+
+ describe '#check_runner_upgrade_suggestion' do
+ subject(:result) { instance.check_runner_upgrade_suggestion(runner_version) }
let(:gitlab_version) { '14.1.1' }
let(:parsed_runner_version) { ::Gitlab::VersionInfo.parse(runner_version, parse_suffix: true) }
-
- before do
- allow(described_class.instance).to receive(:gitlab_version)
- .and_return(::Gitlab::VersionInfo.parse(gitlab_version))
- end
+ let(:runner_releases) { instance_double(Gitlab::Ci::RunnerReleases) }
context 'with failing Gitlab::Ci::RunnerReleases request' do
let(:runner_version) { '14.1.123' }
- let(:runner_releases_double) { instance_double(Gitlab::Ci::RunnerReleases) }
before do
- allow(Gitlab::Ci::RunnerReleases).to receive(:instance).and_return(runner_releases_double)
- allow(runner_releases_double).to receive(:releases).and_return(nil)
+ allow(runner_releases).to receive(:releases).and_return(nil)
end
it 'returns :error' do
- is_expected.to eq({ error: parsed_runner_version })
+ is_expected.to eq([parsed_runner_version, :error])
end
end
context 'with available_runner_releases configured' do
- before do
- url = ::Gitlab::CurrentSettings.current_application_settings.public_runner_releases_url
+ let(:runner_releases) { Gitlab::Ci::RunnerReleases.instance }
+ let(:runner_releases_url) do
+ ::Gitlab::CurrentSettings.current_application_settings.public_runner_releases_url
+ end
- WebMock.stub_request(:get, url).to_return(
+ before do
+ WebMock.stub_request(:get, runner_releases_url).to_return(
body: available_runner_releases.map { |v| { name: v } }.to_json,
status: 200,
headers: { 'Content-Type' => 'application/json' }
@@ -53,7 +52,7 @@ RSpec.describe Gitlab::Ci::RunnerUpgradeCheck do
let(:runner_version) { 'v14.0.1' }
it 'returns :not_available' do
- is_expected.to eq({ not_available: parsed_runner_version })
+ is_expected.to eq([parsed_runner_version, :not_available])
end
end
end
@@ -68,7 +67,7 @@ RSpec.describe Gitlab::Ci::RunnerUpgradeCheck do
let(:runner_version) { nil }
it 'returns :invalid_version' do
- is_expected.to match({ invalid_version: anything })
+ is_expected.to match([anything, :invalid_version])
end
end
@@ -76,7 +75,7 @@ RSpec.describe Gitlab::Ci::RunnerUpgradeCheck do
let(:runner_version) { 'junk' }
it 'returns :invalid_version' do
- is_expected.to match({ invalid_version: anything })
+ is_expected.to match([anything, :invalid_version])
end
end
@@ -87,7 +86,7 @@ RSpec.describe Gitlab::Ci::RunnerUpgradeCheck do
let(:runner_version) { 'v14.2.0' }
it 'returns :not_available' do
- is_expected.to eq({ not_available: parsed_runner_version })
+ is_expected.to eq([parsed_runner_version, :not_available])
end
end
end
@@ -96,7 +95,7 @@ RSpec.describe Gitlab::Ci::RunnerUpgradeCheck do
let(:gitlab_version) { '14.0.1' }
context 'with valid params' do
- where(:runner_version, :expected_result, :expected_suggested_version) do
+ where(:runner_version, :expected_status, :expected_suggested_version) do
'v15.0.0' | :not_available | '15.0.0' # not available since the GitLab instance is still on 14.x, a major version might be incompatible, and a patch upgrade is not available
'v14.1.0-rc3' | :recommended | '14.1.1' # recommended since even though the GitLab instance is still on 14.0.x, there is a patch release (14.1.1) available which might contain security fixes
'v14.1.0~beta.1574.gf6ea9389' | :recommended | '14.1.1' # suffixes are correctly handled
@@ -116,7 +115,7 @@ RSpec.describe Gitlab::Ci::RunnerUpgradeCheck do
end
with_them do
- it { is_expected.to eq({ expected_result => Gitlab::VersionInfo.parse(expected_suggested_version) }) }
+ it { is_expected.to eq([Gitlab::VersionInfo.parse(expected_suggested_version), expected_status]) }
end
end
end
@@ -125,7 +124,7 @@ RSpec.describe Gitlab::Ci::RunnerUpgradeCheck do
let(:gitlab_version) { '13.9.0' }
context 'with valid params' do
- where(:runner_version, :expected_result, :expected_suggested_version) do
+ where(:runner_version, :expected_status, :expected_suggested_version) do
'v14.0.0' | :recommended | '14.0.2' # recommended upgrade since 14.0.2 is available, even though the GitLab instance is still on 13.x and a major version might be incompatible
'v13.10.1' | :not_available | '13.10.1' # not available since 13.10.1 is already ahead of GitLab instance version and is the latest patch update for 13.10.x
'v13.10.0' | :recommended | '13.10.1' # recommended upgrade since 13.10.1 is available
@@ -136,7 +135,7 @@ RSpec.describe Gitlab::Ci::RunnerUpgradeCheck do
end
with_them do
- it { is_expected.to eq({ expected_result => Gitlab::VersionInfo.parse(expected_suggested_version) }) }
+ it { is_expected.to eq([Gitlab::VersionInfo.parse(expected_suggested_version), expected_status]) }
end
end
end
@@ -152,7 +151,7 @@ RSpec.describe Gitlab::Ci::RunnerUpgradeCheck do
let(:runner_version) { '14.11.0~beta.29.gd0c550e3' }
it 'recommends 15.1.0 since 14.11 is an unknown release and 15.1.0 is available' do
- is_expected.to eq({ recommended: Gitlab::VersionInfo.new(15, 1, 0) })
+ is_expected.to eq([Gitlab::VersionInfo.new(15, 1, 0), :recommended])
end
end
end
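
The reworked expectations above treat the upgrade-check result as a [suggested_version, status] pair rather than a { status => version } hash. A minimal, self-contained Ruby sketch of that calling convention follows; the method name and the fixed suggestion are assumptions for illustration, not the GitLab implementation.

# Hypothetical stand-in for the check, mirroring only the return shape asserted
# above: a [suggested_version, status] pair instead of a one-entry hash.
def check_runner_upgrade_suggestion(runner_version)
  return [nil, :invalid_version] unless runner_version.to_s.match?(/\Av?\d+\.\d+/)

  [Gem::Version.new('15.1.0'), :recommended] # static suggestion, purely illustrative
end

version, status = check_runner_upgrade_suggestion('v14.11.0~beta.29.gd0c550e3')
puts "#{status}: #{version}"              # => recommended: 15.1.0
p check_runner_upgrade_suggestion('junk') # => [nil, :invalid_version]
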
diff --git a/spec/lib/gitlab/ci/status/bridge/common_spec.rb b/spec/lib/gitlab/ci/status/bridge/common_spec.rb
index 30e6ad234a0..37524afc83d 100644
--- a/spec/lib/gitlab/ci/status/bridge/common_spec.rb
+++ b/spec/lib/gitlab/ci/status/bridge/common_spec.rb
@@ -29,15 +29,7 @@ RSpec.describe Gitlab::Ci::Status::Bridge::Common do
end
it { expect(subject).to have_details }
- it { expect(subject.details_path).to include "jobs/#{bridge.id}" }
-
- context 'with ci_retry_downstream_pipeline ff disabled' do
- before do
- stub_feature_flags(ci_retry_downstream_pipeline: false)
- end
-
- it { expect(subject.details_path).to include "pipelines/#{downstream_pipeline.id}" }
- end
+ it { expect(subject.details_path).to include "pipelines/#{downstream_pipeline.id}" }
end
context 'when user does not have access to read downstream pipeline' do
diff --git a/spec/lib/gitlab/ci/status/build/canceled_spec.rb b/spec/lib/gitlab/ci/status/build/canceled_spec.rb
index e30a2211c8f..519b970ca5e 100644
--- a/spec/lib/gitlab/ci/status/build/canceled_spec.rb
+++ b/spec/lib/gitlab/ci/status/build/canceled_spec.rb
@@ -14,7 +14,7 @@ RSpec.describe Gitlab::Ci::Status::Build::Canceled do
end
describe '.matches?' do
- subject {described_class.matches?(build, user) }
+ subject { described_class.matches?(build, user) }
context 'when build is canceled' do
let(:build) { create(:ci_build, :canceled) }
diff --git a/spec/lib/gitlab/ci/status/build/created_spec.rb b/spec/lib/gitlab/ci/status/build/created_spec.rb
index 49468674140..9738b3c1f36 100644
--- a/spec/lib/gitlab/ci/status/build/created_spec.rb
+++ b/spec/lib/gitlab/ci/status/build/created_spec.rb
@@ -14,7 +14,7 @@ RSpec.describe Gitlab::Ci::Status::Build::Created do
end
describe '.matches?' do
- subject {described_class.matches?(build, user) }
+ subject { described_class.matches?(build, user) }
context 'when build is created' do
let(:build) { create(:ci_build, :created) }
diff --git a/spec/lib/gitlab/ci/status/build/manual_spec.rb b/spec/lib/gitlab/ci/status/build/manual_spec.rb
index 150705c1e36..a1152cb77e3 100644
--- a/spec/lib/gitlab/ci/status/build/manual_spec.rb
+++ b/spec/lib/gitlab/ci/status/build/manual_spec.rb
@@ -27,7 +27,7 @@ RSpec.describe Gitlab::Ci::Status::Build::Manual do
end
describe '.matches?' do
- subject {described_class.matches?(build, user) }
+ subject { described_class.matches?(build, user) }
context 'when build is manual' do
let(:build) { create(:ci_build, :manual) }
diff --git a/spec/lib/gitlab/ci/status/build/pending_spec.rb b/spec/lib/gitlab/ci/status/build/pending_spec.rb
index 7b695d33877..b7dda9ce9c9 100644
--- a/spec/lib/gitlab/ci/status/build/pending_spec.rb
+++ b/spec/lib/gitlab/ci/status/build/pending_spec.rb
@@ -14,7 +14,7 @@ RSpec.describe Gitlab::Ci::Status::Build::Pending do
end
describe '.matches?' do
- subject {described_class.matches?(build, user) }
+ subject { described_class.matches?(build, user) }
context 'when build is pending' do
let(:build) { create(:ci_build, :pending) }
diff --git a/spec/lib/gitlab/ci/status/build/skipped_spec.rb b/spec/lib/gitlab/ci/status/build/skipped_spec.rb
index 0b998a52a57..4437ac0089f 100644
--- a/spec/lib/gitlab/ci/status/build/skipped_spec.rb
+++ b/spec/lib/gitlab/ci/status/build/skipped_spec.rb
@@ -14,7 +14,7 @@ RSpec.describe Gitlab::Ci::Status::Build::Skipped do
end
describe '.matches?' do
- subject {described_class.matches?(build, user) }
+ subject { described_class.matches?(build, user) }
context 'when build is skipped' do
let(:build) { create(:ci_build, :skipped) }
diff --git a/spec/lib/gitlab/ci/status/processable/waiting_for_resource_spec.rb b/spec/lib/gitlab/ci/status/processable/waiting_for_resource_spec.rb
index 91a9724d043..26087fd771c 100644
--- a/spec/lib/gitlab/ci/status/processable/waiting_for_resource_spec.rb
+++ b/spec/lib/gitlab/ci/status/processable/waiting_for_resource_spec.rb
@@ -15,7 +15,7 @@ RSpec.describe Gitlab::Ci::Status::Processable::WaitingForResource do
end
describe '.matches?' do
- subject {described_class.matches?(processable, user) }
+ subject { described_class.matches?(processable, user) }
context 'when processable is waiting for resource' do
let(:processable) { create(:ci_build, :waiting_for_resource) }
diff --git a/spec/lib/gitlab/ci/templates/Jobs/sast_iac_latest_gitlab_ci_yaml_spec.rb b/spec/lib/gitlab/ci/templates/Jobs/sast_iac_latest_gitlab_ci_yaml_spec.rb
index 0f97bc06a4e..5ff179b6fee 100644
--- a/spec/lib/gitlab/ci/templates/Jobs/sast_iac_latest_gitlab_ci_yaml_spec.rb
+++ b/spec/lib/gitlab/ci/templates/Jobs/sast_iac_latest_gitlab_ci_yaml_spec.rb
@@ -36,9 +36,10 @@ RSpec.describe 'Jobs/SAST-IaC.latest.gitlab-ci.yml' do
let(:merge_request) { create(:merge_request, :simple, source_project: project) }
let(:pipeline) { service.execute(merge_request).payload }
- it 'has no jobs' do
+ it 'creates a pipeline with the expected jobs' do
expect(pipeline).to be_merge_request_event
- expect(build_names).to be_empty
+ expect(pipeline.errors.full_messages).to be_empty
+ expect(build_names).to match_array(%w(kics-iac-sast))
end
end
diff --git a/spec/lib/gitlab/ci/templates/auto_devops_gitlab_ci_yaml_spec.rb b/spec/lib/gitlab/ci/templates/auto_devops_gitlab_ci_yaml_spec.rb
index 78d3982a79f..1a909f52ec3 100644
--- a/spec/lib/gitlab/ci/templates/auto_devops_gitlab_ci_yaml_spec.rb
+++ b/spec/lib/gitlab/ci/templates/auto_devops_gitlab_ci_yaml_spec.rb
@@ -44,7 +44,7 @@ RSpec.describe 'Auto-DevOps.gitlab-ci.yml' do
context 'when the project is set for deployment to AWS' do
let(:platform_value) { 'ECS' }
- let(:review_prod_build_names) { build_names.select {|n| n.include?('review') || n.include?('production')} }
+ let(:review_prod_build_names) { build_names.select { |n| n.include?('review') || n.include?('production') } }
before do
create(:ci_variable, project: project, key: 'AUTO_DEVOPS_PLATFORM_TARGET', value: platform_value)
diff --git a/spec/lib/gitlab/ci/trace/remote_checksum_spec.rb b/spec/lib/gitlab/ci/trace/remote_checksum_spec.rb
index 1cd88034166..be29543676f 100644
--- a/spec/lib/gitlab/ci/trace/remote_checksum_spec.rb
+++ b/spec/lib/gitlab/ci/trace/remote_checksum_spec.rb
@@ -47,7 +47,7 @@ RSpec.describe Gitlab::Ci::Trace::RemoteChecksum do
end
context 'when the response does not include :content_md5' do
- let(:metadata) {{}}
+ let(:metadata) { {} }
it 'raises an exception' do
expect { subject }.to raise_error KeyError, /content_md5/
@@ -55,7 +55,7 @@ RSpec.describe Gitlab::Ci::Trace::RemoteChecksum do
end
context 'when the response include :content_md5' do
- let(:metadata) {{ content_md5: base64checksum }}
+ let(:metadata) { { content_md5: base64checksum } }
it { is_expected.to eq(checksum) }
end
diff --git a/spec/lib/gitlab/ci/variables/builder_spec.rb b/spec/lib/gitlab/ci/variables/builder_spec.rb
index 8ec0846bdca..6ab2089cce8 100644
--- a/spec/lib/gitlab/ci/variables/builder_spec.rb
+++ b/spec/lib/gitlab/ci/variables/builder_spec.rb
@@ -3,6 +3,7 @@
require 'spec_helper'
RSpec.describe Gitlab::Ci::Variables::Builder do
+ include Ci::TemplateHelpers
let_it_be(:group) { create(:group) }
let_it_be(:project) { create(:project, :repository, namespace: group) }
let_it_be_with_reload(:pipeline) { create(:ci_pipeline, project: project) }
@@ -92,6 +93,8 @@ RSpec.describe Gitlab::Ci::Variables::Builder do
value: project.pages_url },
{ key: 'CI_API_V4_URL',
value: API::Helpers::Version.new('v4').root_url },
+ { key: 'CI_TEMPLATE_REGISTRY_HOST',
+ value: template_registry_host },
{ key: 'CI_PIPELINE_IID',
value: pipeline.iid.to_s },
{ key: 'CI_PIPELINE_SOURCE',
diff --git a/spec/lib/gitlab/ci/variables/collection_spec.rb b/spec/lib/gitlab/ci/variables/collection_spec.rb
index 26c560565e0..8ac03301322 100644
--- a/spec/lib/gitlab/ci/variables/collection_spec.rb
+++ b/spec/lib/gitlab/ci/variables/collection_spec.rb
@@ -302,6 +302,7 @@ RSpec.describe Gitlab::Ci::Variables::Collection do
.append(key: 'CI_BUILD_ID', value: '1')
.append(key: 'RAW_VAR', value: '$TEST1', raw: true)
.append(key: 'TEST1', value: 'test-3')
+ .append(key: 'FILEVAR1', value: 'file value 1', file: true)
end
context 'table tests' do
@@ -311,28 +312,23 @@ RSpec.describe Gitlab::Ci::Variables::Collection do
{
"empty value": {
value: '',
- result: '',
- keep_undefined: false
+ result: ''
},
"simple expansions": {
value: 'key$TEST1-$CI_BUILD_ID',
- result: 'keytest-3-1',
- keep_undefined: false
+ result: 'keytest-3-1'
},
"complex expansion": {
value: 'key${TEST1}-${CI_JOB_NAME}',
- result: 'keytest-3-test-1',
- keep_undefined: false
+ result: 'keytest-3-test-1'
},
"complex expansions with raw variable": {
value: 'key${RAW_VAR}-${CI_JOB_NAME}',
- result: 'key$TEST1-test-1',
- keep_undefined: false
+ result: 'key$TEST1-test-1'
},
"missing variable not keeping original": {
value: 'key${MISSING_VAR}-${CI_JOB_NAME}',
- result: 'key-test-1',
- keep_undefined: false
+ result: 'key-test-1'
},
"missing variable keeping original": {
value: 'key${MISSING_VAR}-${CI_JOB_NAME}',
@@ -341,14 +337,24 @@ RSpec.describe Gitlab::Ci::Variables::Collection do
},
"escaped characters are kept intact": {
value: 'key-$TEST1-%%HOME%%-$${HOME}',
- result: 'key-test-3-%%HOME%%-$${HOME}',
- keep_undefined: false
+ result: 'key-test-3-%%HOME%%-$${HOME}'
+ },
+ "file variable with expand_file_vars: true": {
+ value: 'key-$FILEVAR1-$TEST1',
+ result: 'key-file value 1-test-3'
+ },
+ "file variable with expand_file_vars: false": {
+ value: 'key-$FILEVAR1-$TEST1',
+ result: 'key-$FILEVAR1-test-3',
+ expand_file_vars: false
}
}
end
with_them do
- subject { collection.expand_value(value, keep_undefined: keep_undefined) }
+ let(:options) { { keep_undefined: keep_undefined, expand_file_vars: expand_file_vars }.compact }
+
+ subject(:result) { collection.expand_value(value, **options) }
it 'matches expected expansion' do
is_expected.to eq(result)
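
The new table cases exercise an expand_file_vars option alongside keep_undefined. A rough, self-contained sketch of that behaviour, not the Gitlab::Ci::Variables::Collection implementation (escape handling is omitted):

# Minimal sketch: file variables are expanded only when expand_file_vars is true,
# and unknown variables are dropped unless keep_undefined is set.
def expand(value, vars, keep_undefined: false, expand_file_vars: true)
  value.gsub(/\$(\w+)|\$\{(\w+)\}/) do |match|
    name = Regexp.last_match(1) || Regexp.last_match(2)
    var  = vars[name]
    next (keep_undefined ? match : '') if var.nil?
    next match if var[:file] && !expand_file_vars

    var[:value]
  end
end

vars = { 'TEST1' => { value: 'test-3' }, 'FILEVAR1' => { value: 'file value 1', file: true } }
expand('key-$FILEVAR1-$TEST1', vars)                          # => "key-file value 1-test-3"
expand('key-$FILEVAR1-$TEST1', vars, expand_file_vars: false) # => "key-$FILEVAR1-test-3"
expand('key${MISSING_VAR}-ok', vars)                          # => "key-ok"
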
diff --git a/spec/lib/gitlab/ci/variables/helpers_spec.rb b/spec/lib/gitlab/ci/variables/helpers_spec.rb
index f13b334c10e..2a1cdaeb3a7 100644
--- a/spec/lib/gitlab/ci/variables/helpers_spec.rb
+++ b/spec/lib/gitlab/ci/variables/helpers_spec.rb
@@ -15,21 +15,27 @@ RSpec.describe Gitlab::Ci::Variables::Helpers do
end
let(:result) do
- [{ key: 'key1', value: 'value1', public: true },
- { key: 'key2', value: 'value22', public: true },
- { key: 'key3', value: 'value3', public: true }]
+ [{ key: 'key1', value: 'value1' },
+ { key: 'key2', value: 'value22' },
+ { key: 'key3', value: 'value3' }]
end
subject { described_class.merge_variables(current_variables, new_variables) }
- it { is_expected.to eq(result) }
+ it { is_expected.to match_array(result) }
context 'when new variables is a hash' do
let(:new_variables) do
{ 'key2' => 'value22', 'key3' => 'value3' }
end
- it { is_expected.to eq(result) }
+ let(:result) do
+ [{ key: 'key1', value: 'value1' },
+ { key: 'key2', value: 'value22' },
+ { key: 'key3', value: 'value3' }]
+ end
+
+ it { is_expected.to match_array(result) }
end
context 'when new variables is a hash with symbol keys' do
@@ -37,67 +43,72 @@ RSpec.describe Gitlab::Ci::Variables::Helpers do
{ key2: 'value22', key3: 'value3' }
end
- it { is_expected.to eq(result) }
+ let(:result) do
+ [{ key: 'key1', value: 'value1' },
+ { key: 'key2', value: 'value22' },
+ { key: 'key3', value: 'value3' }]
+ end
+
+ it { is_expected.to match_array(result) }
end
context 'when new variables is nil' do
let(:new_variables) {}
let(:result) do
- [{ key: 'key1', value: 'value1', public: true },
- { key: 'key2', value: 'value2', public: true }]
+ [{ key: 'key1', value: 'value1' },
+ { key: 'key2', value: 'value2' }]
end
- it { is_expected.to eq(result) }
+ it { is_expected.to match_array(result) }
end
end
- describe '.transform_to_yaml_variables' do
- let(:variables) do
- { 'key1' => 'value1', 'key2' => 'value2' }
- end
+ describe '.transform_to_array' do
+ subject { described_class.transform_to_array(variables) }
- let(:result) do
- [{ key: 'key1', value: 'value1', public: true },
- { key: 'key2', value: 'value2', public: true }]
- end
+ context 'when values are strings' do
+ let(:variables) do
+ { 'key1' => 'value1', 'key2' => 'value2' }
+ end
- subject { described_class.transform_to_yaml_variables(variables) }
+ let(:result) do
+ [{ key: 'key1', value: 'value1' },
+ { key: 'key2', value: 'value2' }]
+ end
- it { is_expected.to eq(result) }
+ it { is_expected.to match_array(result) }
+ end
context 'when variables is nil' do
let(:variables) {}
- it { is_expected.to eq([]) }
- end
- end
-
- describe '.transform_from_yaml_variables' do
- let(:variables) do
- [{ key: 'key1', value: 'value1', public: true },
- { key: 'key2', value: 'value2', public: true }]
+ it { is_expected.to match_array([]) }
end
- let(:result) do
- { 'key1' => 'value1', 'key2' => 'value2' }
- end
+ context 'when values are hashes' do
+ let(:variables) do
+ { 'key1' => { value: 'value1', description: 'var 1' }, 'key2' => { value: 'value2' } }
+ end
- subject { described_class.transform_from_yaml_variables(variables) }
+ let(:result) do
+ [{ key: 'key1', value: 'value1', description: 'var 1' },
+ { key: 'key2', value: 'value2' }]
+ end
- it { is_expected.to eq(result) }
+ it { is_expected.to match_array(result) }
- context 'when variables is nil' do
- let(:variables) {}
+ context 'when a value data has `key` as a key' do
+ let(:variables) do
+ { 'key1' => { value: 'value1', key: 'new_key1' }, 'key2' => { value: 'value2' } }
+ end
- it { is_expected.to eq({}) }
- end
+ let(:result) do
+ [{ key: 'key1', value: 'value1' },
+ { key: 'key2', value: 'value2' }]
+ end
- context 'when variables is a hash' do
- let(:variables) do
- { key1: 'value1', 'key2' => 'value2' }
+ it { is_expected.to match_array(result) }
end
-
- it { is_expected.to eq(result) }
end
end
@@ -115,35 +126,35 @@ RSpec.describe Gitlab::Ci::Variables::Helpers do
let(:inheritance) { true }
let(:result) do
- [{ key: 'key1', value: 'value1', public: true },
- { key: 'key2', value: 'value22', public: true },
- { key: 'key3', value: 'value3', public: true }]
+ [{ key: 'key1', value: 'value1' },
+ { key: 'key2', value: 'value22' },
+ { key: 'key3', value: 'value3' }]
end
subject { described_class.inherit_yaml_variables(from: from, to: to, inheritance: inheritance) }
- it { is_expected.to eq(result) }
+ it { is_expected.to match_array(result) }
context 'when inheritance is false' do
let(:inheritance) { false }
let(:result) do
- [{ key: 'key2', value: 'value22', public: true },
- { key: 'key3', value: 'value3', public: true }]
+ [{ key: 'key2', value: 'value22' },
+ { key: 'key3', value: 'value3' }]
end
- it { is_expected.to eq(result) }
+ it { is_expected.to match_array(result) }
end
context 'when inheritance is array' do
let(:inheritance) { ['key2'] }
let(:result) do
- [{ key: 'key2', value: 'value22', public: true },
- { key: 'key3', value: 'value3', public: true }]
+ [{ key: 'key2', value: 'value22' },
+ { key: 'key3', value: 'value3' }]
end
- it { is_expected.to eq(result) }
+ it { is_expected.to match_array(result) }
end
end
end
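
The renamed transform_to_array helper is exercised with both plain string values and hash values, and a `key` entry inside the value data does not override the variable name. A hedged sketch of that transformation (assumes Ruby 3.0+ for Hash#except; not the Gitlab::Ci::Variables::Helpers code itself):

# Rough sketch: string values become { key:, value: } pairs, hash values are
# merged in, and a nested :key entry is discarded.
def transform_to_array(variables)
  return [] if variables.nil?

  variables.map do |key, data|
    if data.is_a?(Hash)
      { key: key.to_s }.merge(data.except(:key))
    else
      { key: key.to_s, value: data }
    end
  end
end

transform_to_array('key1' => { value: 'value1', description: 'var 1' }, 'key2' => 'value2')
# => [{ key: "key1", value: "value1", description: "var 1" }, { key: "key2", value: "value2" }]
transform_to_array(nil) # => []
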
diff --git a/spec/lib/gitlab/ci/yaml_processor/result_spec.rb b/spec/lib/gitlab/ci/yaml_processor/result_spec.rb
index 8416501e949..f7a0905d9da 100644
--- a/spec/lib/gitlab/ci/yaml_processor/result_spec.rb
+++ b/spec/lib/gitlab/ci/yaml_processor/result_spec.rb
@@ -72,8 +72,8 @@ module Gitlab
it 'returns calculated variables with root and job variables' do
is_expected.to match_array([
- { key: 'VAR1', value: 'value 11', public: true },
- { key: 'VAR2', value: 'value 2', public: true }
+ { key: 'VAR1', value: 'value 11' },
+ { key: 'VAR2', value: 'value 2' }
])
end
diff --git a/spec/lib/gitlab/ci/yaml_processor_spec.rb b/spec/lib/gitlab/ci/yaml_processor_spec.rb
index 22bc6b0db59..3477fe837b4 100644
--- a/spec/lib/gitlab/ci/yaml_processor_spec.rb
+++ b/spec/lib/gitlab/ci/yaml_processor_spec.rb
@@ -448,7 +448,7 @@ module Gitlab
it 'parses the root:variables as #root_variables' do
expect(subject.root_variables)
- .to contain_exactly({ key: 'SUPPORTED', value: 'parsed', public: true })
+ .to contain_exactly({ key: 'SUPPORTED', value: 'parsed' })
end
end
@@ -490,7 +490,7 @@ module Gitlab
it 'parses the root:variables as #root_variables' do
expect(subject.root_variables)
- .to contain_exactly({ key: 'SUPPORTED', value: 'parsed', public: true })
+ .to contain_exactly({ key: 'SUPPORTED', value: 'parsed' })
end
end
@@ -1098,8 +1098,8 @@ module Gitlab
it 'returns job variables' do
expect(job_variables).to contain_exactly(
- { key: 'VAR1', value: 'value1', public: true },
- { key: 'VAR2', value: 'value2', public: true }
+ { key: 'VAR1', value: 'value1' },
+ { key: 'VAR2', value: 'value2' }
)
expect(root_variables_inheritance).to eq(true)
end
@@ -1203,21 +1203,21 @@ module Gitlab
expect(config_processor.builds[0]).to include(
name: 'test1',
options: { script: ['test'] },
- job_variables: [{ key: 'VAR1', value: 'test1 var 1', public: true },
- { key: 'VAR2', value: 'test2 var 2', public: true }]
+ job_variables: [{ key: 'VAR1', value: 'test1 var 1' },
+ { key: 'VAR2', value: 'test2 var 2' }]
)
expect(config_processor.builds[1]).to include(
name: 'test2',
options: { script: ['test'] },
- job_variables: [{ key: 'VAR1', value: 'base var 1', public: true },
- { key: 'VAR2', value: 'test2 var 2', public: true }]
+ job_variables: [{ key: 'VAR1', value: 'base var 1' },
+ { key: 'VAR2', value: 'test2 var 2' }]
)
expect(config_processor.builds[2]).to include(
name: 'test3',
options: { script: ['test'] },
- job_variables: [{ key: 'VAR1', value: 'base var 1', public: true }]
+ job_variables: [{ key: 'VAR1', value: 'base var 1' }]
)
expect(config_processor.builds[3]).to include(
@@ -1425,7 +1425,7 @@ module Gitlab
it 'returns the parallel config' do
build_options = builds.map { |build| build[:options] }
parallel_config = {
- matrix: parallel[:matrix].map { |var| var.transform_values { |v| Array(v).flatten }},
+ matrix: parallel[:matrix].map { |var| var.transform_values { |v| Array(v).flatten } },
total: build_options.size
}
@@ -1766,6 +1766,7 @@ module Gitlab
script: ["make changelog | tee release_changelog.txt"],
release: {
tag_name: "$CI_COMMIT_TAG",
+ tag_message: "Annotated tag message",
name: "Release $CI_TAG_NAME",
description: "./release_changelog.txt",
ref: 'b3235930aa443112e639f941c69c578912189bdd',
@@ -1956,7 +1957,7 @@ module Gitlab
subject { Gitlab::Ci::YamlProcessor.new(YAML.dump(config)).execute }
context 'no dependencies' do
- let(:dependencies) { }
+ let(:dependencies) {}
it { is_expected.to be_valid }
end
@@ -2012,8 +2013,8 @@ module Gitlab
end
describe "Job Needs" do
- let(:needs) { }
- let(:dependencies) { }
+ let(:needs) {}
+ let(:dependencies) {}
let(:config) do
{
@@ -2893,7 +2894,7 @@ module Gitlab
end
end
- describe 'Rules' do
+ describe 'Job rules' do
context 'changes' do
let(:config) do
<<~YAML
@@ -2938,6 +2939,49 @@ module Gitlab
end
end
+ describe 'Workflow rules' do
+ context 'changes' do
+ let(:config) do
+ <<~YAML
+ workflow:
+ rules:
+ - changes: [README.md]
+
+ rspec:
+ script: exit 0
+ YAML
+ end
+
+ it 'returns pipeline with correct rules' do
+ expect(processor.builds.size).to eq(1)
+ expect(processor.workflow_rules).to eq(
+ [{ changes: { paths: ["README.md"] } }]
+ )
+ end
+
+ context 'with paths' do
+ let(:config) do
+ <<~YAML
+ workflow:
+ rules:
+ - changes:
+ paths: [README.md]
+
+ rspec:
+ script: exit 0
+ YAML
+ end
+
+ it 'returns pipeline with correct rules' do
+ expect(processor.builds.size).to eq(1)
+ expect(processor.workflow_rules).to eq(
+ [{ changes: { paths: ["README.md"] } }]
+ )
+ end
+ end
+ end
+ end
+
describe '#execute' do
subject { Gitlab::Ci::YamlProcessor.new(content).execute }
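
The two workflow-rule configs added above are equivalent: a bare `changes:` array and a `changes: paths:` hash both end up as `{ changes: { paths: [...] } }`. A small sketch of that normalisation; the method name is illustrative, not GitLab's.

# Sketch of the rule normalisation implied by the equivalent configs above.
def normalize_changes(changes)
  paths = changes.is_a?(Hash) ? changes[:paths] : changes
  { changes: { paths: Array(paths) } }
end

normalize_changes(['README.md'])            # => { changes: { paths: ["README.md"] } }
normalize_changes({ paths: ['README.md'] }) # => { changes: { paths: ["README.md"] } }
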
diff --git a/spec/lib/gitlab/composer/cache_spec.rb b/spec/lib/gitlab/composer/cache_spec.rb
index 071771960c6..a4d632da848 100644
--- a/spec/lib/gitlab/composer/cache_spec.rb
+++ b/spec/lib/gitlab/composer/cache_spec.rb
@@ -31,7 +31,7 @@ RSpec.describe Gitlab::Composer::Cache do
cache_file = Packages::Composer::CacheFile.last
freeze_time do
- expect { subject }.to change { cache_file.reload.delete_at}.from(nil).to(1.day.from_now)
+ expect { subject }.to change { cache_file.reload.delete_at }.from(nil).to(1.day.from_now)
end
end
end
diff --git a/spec/lib/gitlab/cycle_analytics/stage_summary_spec.rb b/spec/lib/gitlab/cycle_analytics/stage_summary_spec.rb
index 7173ea43450..0e7d7f1efda 100644
--- a/spec/lib/gitlab/cycle_analytics/stage_summary_spec.rb
+++ b/spec/lib/gitlab/cycle_analytics/stage_summary_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
RSpec.describe Gitlab::CycleAnalytics::StageSummary do
+ include CycleAnalyticsHelpers
+
let_it_be(:project) { create(:project, :repository) }
let(:options) { { from: 1.day.ago } }
diff --git a/spec/lib/gitlab/data_builder/build_spec.rb b/spec/lib/gitlab/data_builder/build_spec.rb
index 9cee0802e87..2c239d5868a 100644
--- a/spec/lib/gitlab/data_builder/build_spec.rb
+++ b/spec/lib/gitlab/data_builder/build_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
RSpec.describe Gitlab::DataBuilder::Build do
let!(:tag_names) { %w(tag-1 tag-2) }
- let(:runner) { create(:ci_runner, :instance, tag_list: tag_names.map { |n| ActsAsTaggableOn::Tag.create!(name: n)}) }
+ let(:runner) { create(:ci_runner, :instance, tag_list: tag_names.map { |n| ActsAsTaggableOn::Tag.create!(name: n) }) }
let(:user) { create(:user, :public_email) }
let(:build) { create(:ci_build, :running, runner: runner, user: user) }
@@ -33,6 +33,7 @@ RSpec.describe Gitlab::DataBuilder::Build do
it { expect(data[:project_id]).to eq(build.project.id) }
it { expect(data[:project_name]).to eq(build.project.full_name) }
it { expect(data[:pipeline_id]).to eq(build.pipeline.id) }
+
it {
expect(data[:user]).to eq(
{
@@ -43,6 +44,7 @@ RSpec.describe Gitlab::DataBuilder::Build do
email: user.email
})
}
+
it { expect(data[:commit][:id]).to eq(build.pipeline.id) }
it { expect(data[:runner][:id]).to eq(build.runner.id) }
it { expect(data[:runner][:tags]).to match_array(tag_names) }
diff --git a/spec/lib/gitlab/data_builder/issuable_spec.rb b/spec/lib/gitlab/data_builder/issuable_spec.rb
index c1ae65c160f..f0802f335f4 100644
--- a/spec/lib/gitlab/data_builder/issuable_spec.rb
+++ b/spec/lib/gitlab/data_builder/issuable_spec.rb
@@ -113,6 +113,7 @@ RSpec.describe Gitlab::DataBuilder::Issuable do
expect(data[:object_attributes]['assignee_id']).to eq(user.id)
expect(data[:assignees].first).to eq(user.hook_attrs)
expect(data).not_to have_key(:assignee)
+ expect(data).not_to have_key(:reviewers)
end
end
@@ -126,5 +127,25 @@ RSpec.describe Gitlab::DataBuilder::Issuable do
expect(data).not_to have_key(:assignee)
end
end
+
+ context 'merge_request is assigned reviewers' do
+ let(:merge_request) { create(:merge_request, reviewers: [user]) }
+ let(:data) { described_class.new(merge_request).build(user: user) }
+
+ it 'returns correct hook data' do
+ expect(data[:object_attributes]['reviewer_ids']).to match_array([user.id])
+ expect(data[:reviewers].first).to eq(user.hook_attrs)
+ end
+ end
+
+ context 'when merge_request does not have reviewers and assignees' do
+ let(:merge_request) { create(:merge_request) }
+ let(:data) { described_class.new(merge_request).build(user: user) }
+
+ it 'returns correct hook data' do
+ expect(data).not_to have_key(:assignees)
+ expect(data).not_to have_key(:reviewers)
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/data_builder/pipeline_spec.rb b/spec/lib/gitlab/data_builder/pipeline_spec.rb
index 469812c80fc..86a1539a836 100644
--- a/spec/lib/gitlab/data_builder/pipeline_spec.rb
+++ b/spec/lib/gitlab/data_builder/pipeline_spec.rb
@@ -54,7 +54,7 @@ RSpec.describe Gitlab::DataBuilder::Pipeline do
context 'build with runner' do
let_it_be(:tag_names) { %w(tag-1 tag-2) }
- let_it_be(:ci_runner) { create(:ci_runner, tag_list: tag_names.map { |n| ActsAsTaggableOn::Tag.create!(name: n)}) }
+ let_it_be(:ci_runner) { create(:ci_runner, tag_list: tag_names.map { |n| ActsAsTaggableOn::Tag.create!(name: n) }) }
let_it_be(:build) { create(:ci_build, pipeline: pipeline, runner: ci_runner) }
it 'has runner attributes', :aggregate_failures do
diff --git a/spec/lib/gitlab/data_builder/push_spec.rb b/spec/lib/gitlab/data_builder/push_spec.rb
index 7eb81a880bf..a3dd4e49e83 100644
--- a/spec/lib/gitlab/data_builder/push_spec.rb
+++ b/spec/lib/gitlab/data_builder/push_spec.rb
@@ -67,6 +67,7 @@ RSpec.describe Gitlab::DataBuilder::Push do
it { expect(data[:project_id]).to eq(15) }
it { expect(data[:commits].size).to eq(1) }
it { expect(data[:total_commits_count]).to eq(1) }
+
it 'contains project data' do
expect(data[:project]).to be_a(Hash)
expect(data[:project][:id]).to eq(15)
diff --git a/spec/lib/gitlab/database/async_indexes/index_destructor_spec.rb b/spec/lib/gitlab/database/async_indexes/index_destructor_spec.rb
new file mode 100644
index 00000000000..adb0f45706d
--- /dev/null
+++ b/spec/lib/gitlab/database/async_indexes/index_destructor_spec.rb
@@ -0,0 +1,69 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::AsyncIndexes::IndexDestructor do
+ include ExclusiveLeaseHelpers
+
+ describe '#perform' do
+ subject { described_class.new(async_index) }
+
+ let(:async_index) { create(:postgres_async_index, :with_drop) }
+
+ let(:index_model) { Gitlab::Database::AsyncIndexes::PostgresAsyncIndex }
+
+ let(:model) { Gitlab::Database.database_base_models[Gitlab::Database::PRIMARY_DATABASE_NAME] }
+ let(:connection) { model.connection }
+
+ let!(:lease) { stub_exclusive_lease(lease_key, :uuid, timeout: lease_timeout) }
+ let(:lease_key) { "gitlab/database/async_indexes/index_destructor/#{Gitlab::Database::PRIMARY_DATABASE_NAME}" }
+ let(:lease_timeout) { described_class::TIMEOUT_PER_ACTION }
+
+ before do
+ connection.add_index(async_index.table_name, 'id', name: async_index.name)
+ end
+
+ around do |example|
+ Gitlab::Database::SharedModel.using_connection(connection) do
+ example.run
+ end
+ end
+
+ context 'when the index does not exist' do
+ before do
+ connection.execute(async_index.definition)
+ end
+
+ it 'skips index destruction' do
+ expect(connection).not_to receive(:execute).with(/DROP INDEX/)
+
+ subject.perform
+ end
+ end
+
+ it 'drops the index while controlling lock timeout' do
+ allow(connection).to receive(:execute).and_call_original
+ expect(connection).to receive(:execute).with("SET lock_timeout TO '60000ms'").and_call_original
+ expect(connection).to receive(:execute).with(async_index.definition).and_call_original
+ expect(connection).to receive(:execute)
+ .with("RESET idle_in_transaction_session_timeout; RESET lock_timeout")
+ .and_call_original
+
+ subject.perform
+ end
+
+ it 'removes the index preparation record from postgres_async_indexes' do
+ expect(async_index).to receive(:destroy).and_call_original
+
+ expect { subject.perform }.to change { index_model.count }.by(-1)
+ end
+
+ it 'skips logic if not able to acquire exclusive lease' do
+ expect(lease).to receive(:try_obtain).ordered.and_return(false)
+ expect(connection).not_to receive(:execute).with(/DROP INDEX/)
+ expect(async_index).not_to receive(:destroy)
+
+ expect { subject.perform }.not_to change { index_model.count }
+ end
+ end
+end
diff --git a/spec/lib/gitlab/database/async_indexes/migration_helpers_spec.rb b/spec/lib/gitlab/database/async_indexes/migration_helpers_spec.rb
index 9ba3dad72b3..52f5e37eff2 100644
--- a/spec/lib/gitlab/database/async_indexes/migration_helpers_spec.rb
+++ b/spec/lib/gitlab/database/async_indexes/migration_helpers_spec.rb
@@ -142,4 +142,42 @@ RSpec.describe Gitlab::Database::AsyncIndexes::MigrationHelpers do
end
end
end
+
+ describe '#prepare_async_index_removal' do
+ before do
+ connection.create_table(table_name)
+ connection.add_index(table_name, 'id', name: index_name)
+ end
+
+ it 'creates the record for the async index removal' do
+ expect do
+ migration.prepare_async_index_removal(table_name, 'id', name: index_name)
+ end.to change { index_model.where(name: index_name).count }.by(1)
+
+ record = index_model.find_by(name: index_name)
+
+ expect(record.table_name).to eq(table_name)
+ expect(record.definition).to match(/DROP INDEX CONCURRENTLY "#{index_name}"/)
+ end
+
+ context 'when the index does not exist' do
+ it 'does not create the record' do
+ connection.remove_index(table_name, 'id', name: index_name)
+
+ expect do
+ migration.prepare_async_index_removal(table_name, 'id', name: index_name)
+ end.not_to change { index_model.where(name: index_name).count }
+ end
+ end
+
+ context 'when the record already exists' do
+ it 'does not attempt to create the record' do
+ create(:postgres_async_index, table_name: table_name, name: index_name)
+
+ expect do
+ migration.prepare_async_index_removal(table_name, 'id', name: index_name)
+ end.not_to change { index_model.where(name: index_name).count }
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/database/async_indexes/postgres_async_index_spec.rb b/spec/lib/gitlab/database/async_indexes/postgres_async_index_spec.rb
index 223730f87c0..806d57af4b3 100644
--- a/spec/lib/gitlab/database/async_indexes/postgres_async_index_spec.rb
+++ b/spec/lib/gitlab/database/async_indexes/postgres_async_index_spec.rb
@@ -16,4 +16,21 @@ RSpec.describe Gitlab::Database::AsyncIndexes::PostgresAsyncIndex, type: :model
it { is_expected.to validate_presence_of(:definition) }
it { is_expected.to validate_length_of(:definition).is_at_most(definition_limit) }
end
+
+ describe 'scopes' do
+ let!(:async_index_creation) { create(:postgres_async_index) }
+ let!(:async_index_destruction) { create(:postgres_async_index, :with_drop) }
+
+ describe '.to_create' do
+ subject { described_class.to_create }
+
+ it { is_expected.to contain_exactly(async_index_creation) }
+ end
+
+ describe '.to_drop' do
+ subject { described_class.to_drop }
+
+ it { is_expected.to contain_exactly(async_index_destruction) }
+ end
+ end
end
diff --git a/spec/lib/gitlab/database/async_indexes_spec.rb b/spec/lib/gitlab/database/async_indexes_spec.rb
index 74e30ea2c4e..8a5509f892f 100644
--- a/spec/lib/gitlab/database/async_indexes_spec.rb
+++ b/spec/lib/gitlab/database/async_indexes_spec.rb
@@ -11,7 +11,7 @@ RSpec.describe Gitlab::Database::AsyncIndexes do
end
it 'takes 2 pending indexes and creates those' do
- Gitlab::Database::AsyncIndexes::PostgresAsyncIndex.order(:id).limit(2).each do |index|
+ Gitlab::Database::AsyncIndexes::PostgresAsyncIndex.to_create.order(:id).limit(2).each do |index|
creator = double('index creator')
expect(Gitlab::Database::AsyncIndexes::IndexCreator).to receive(:new).with(index).and_return(creator)
expect(creator).to receive(:perform)
@@ -20,4 +20,22 @@ RSpec.describe Gitlab::Database::AsyncIndexes do
subject
end
end
+
+ describe '.drop_pending_indexes!' do
+ subject { described_class.drop_pending_indexes! }
+
+ before do
+ create_list(:postgres_async_index, 4, :with_drop)
+ end
+
+ it 'takes 2 pending indexes and destroys those' do
+ Gitlab::Database::AsyncIndexes::PostgresAsyncIndex.to_drop.order(:id).limit(2).each do |index|
+ destructor = double('index destructor')
+ expect(Gitlab::Database::AsyncIndexes::IndexDestructor).to receive(:new).with(index).and_return(destructor)
+ expect(destructor).to receive(:perform)
+ end
+
+ subject
+ end
+ end
end
diff --git a/spec/lib/gitlab/database/background_migration/batched_job_spec.rb b/spec/lib/gitlab/database/background_migration/batched_job_spec.rb
index a7b3670da7c..32746a46308 100644
--- a/spec/lib/gitlab/database/background_migration/batched_job_spec.rb
+++ b/spec/lib/gitlab/database/background_migration/batched_job_spec.rb
@@ -304,6 +304,13 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedJob, type: :model d
it { expect(subject).to be_falsey }
end
+
+ context 'when the batch_size is 1' do
+ let(:job) { create(:batched_background_migration_job, :failed, batch_size: 1) }
+ let(:exception) { ActiveRecord::StatementTimeout.new }
+
+ it { expect(subject).to be_falsey }
+ end
end
describe '#time_efficiency' do
@@ -415,10 +422,18 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedJob, type: :model d
end
context 'when batch size is already 1' do
- let!(:job) { create(:batched_background_migration_job, :failed, batch_size: 1) }
+ let!(:job) { create(:batched_background_migration_job, :failed, batch_size: 1, attempts: 3) }
- it 'raises an exception' do
- expect { job.split_and_retry! }.to raise_error 'Job cannot be split further'
+ it 'keeps the same batch size' do
+ job.split_and_retry!
+
+ expect(job.reload.batch_size).to eq 1
+ end
+
+ it 'resets the number of attempts' do
+ job.split_and_retry!
+
+ expect(job.attempts).to eq 0
end
end
diff --git a/spec/lib/gitlab/database/background_migration/batched_migration_runner_spec.rb b/spec/lib/gitlab/database/background_migration/batched_migration_runner_spec.rb
index b8ff78be333..4ef2e7f936b 100644
--- a/spec/lib/gitlab/database/background_migration/batched_migration_runner_spec.rb
+++ b/spec/lib/gitlab/database/background_migration/batched_migration_runner_spec.rb
@@ -15,8 +15,8 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigrationRunner do
end
before do
- allow(Gitlab::Database::BackgroundMigration::HealthStatus).to receive(:evaluate)
- .and_return(Gitlab::Database::BackgroundMigration::HealthStatus::Signals::Normal)
+ normal_signal = instance_double(Gitlab::Database::BackgroundMigration::HealthStatus::Signals::Normal, stop?: false)
+ allow(Gitlab::Database::BackgroundMigration::HealthStatus).to receive(:evaluate).and_return([normal_signal])
end
describe '#run_migration_job' do
@@ -77,14 +77,14 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigrationRunner do
end
it 'puts migration on hold on stop signal' do
- expect(health_status).to receive(:evaluate).and_return(stop_signal)
+ expect(health_status).to receive(:evaluate).and_return([stop_signal])
expect { runner.run_migration_job(migration) }.to change { migration.on_hold? }
.from(false).to(true)
end
it 'optimizes migration on normal signal' do
- expect(health_status).to receive(:evaluate).and_return(normal_signal)
+ expect(health_status).to receive(:evaluate).and_return([normal_signal])
expect(migration).to receive(:optimize!)
@@ -92,7 +92,7 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigrationRunner do
end
it 'optimizes migration on no signal' do
- expect(health_status).to receive(:evaluate).and_return(not_available_signal)
+ expect(health_status).to receive(:evaluate).and_return([not_available_signal])
expect(migration).to receive(:optimize!)
@@ -100,7 +100,7 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigrationRunner do
end
it 'optimizes migration on unknown signal' do
- expect(health_status).to receive(:evaluate).and_return(unknown_signal)
+ expect(health_status).to receive(:evaluate).and_return([unknown_signal])
expect(migration).to receive(:optimize!)
diff --git a/spec/lib/gitlab/database/background_migration/batched_migration_spec.rb b/spec/lib/gitlab/database/background_migration/batched_migration_spec.rb
index 55f607c0cb0..06c2bc32db3 100644
--- a/spec/lib/gitlab/database/background_migration/batched_migration_spec.rb
+++ b/spec/lib/gitlab/database/background_migration/batched_migration_spec.rb
@@ -307,7 +307,7 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigration, type: :m
end
describe '#batch_class' do
- let(:batch_class) { Gitlab::BackgroundMigration::BatchingStrategies::PrimaryKeyBatchingStrategy}
+ let(:batch_class) { Gitlab::BackgroundMigration::BatchingStrategies::PrimaryKeyBatchingStrategy }
let(:batched_migration) { build(:batched_background_migration) }
it 'returns the class of the batch strategy for the migration' do
@@ -617,6 +617,49 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigration, type: :m
end
end
+ describe '#progress' do
+ subject { migration.progress }
+
+ context 'when the migration is finished' do
+ let(:migration) do
+ create(:batched_background_migration, :finished, total_tuple_count: 1).tap do |record|
+ create(:batched_background_migration_job, :succeeded, batched_migration: record, batch_size: 1)
+ end
+ end
+
+ it 'returns 100' do
+ expect(subject).to be 100
+ end
+ end
+
+ context 'when the migration does not have jobs' do
+ let(:migration) { create(:batched_background_migration, :active) }
+
+ it 'returns zero' do
+ expect(subject).to be 0
+ end
+ end
+
+ context 'when the `total_tuple_count` is zero' do
+ let(:migration) { create(:batched_background_migration, :active, total_tuple_count: 0) }
+ let!(:batched_job) { create(:batched_background_migration_job, :succeeded, batched_migration: migration) }
+
+ it 'returns nil' do
+ expect(subject).to be nil
+ end
+ end
+
+ context 'when migration has completed jobs' do
+ let(:migration) { create(:batched_background_migration, :active, total_tuple_count: 100) }
+
+ let!(:batched_job) { create(:batched_background_migration_job, :succeeded, batched_migration: migration, batch_size: 8) }
+
+ it 'calculates the progress' do
+ expect(subject).to be 8
+ end
+ end
+ end
+
describe '.for_configuration' do
let!(:attributes) do
{
diff --git a/spec/lib/gitlab/database/background_migration/batched_migration_wrapper_spec.rb b/spec/lib/gitlab/database/background_migration/batched_migration_wrapper_spec.rb
index 83c0275a870..983f482d464 100644
--- a/spec/lib/gitlab/database/background_migration/batched_migration_wrapper_spec.rb
+++ b/spec/lib/gitlab/database/background_migration/batched_migration_wrapper_spec.rb
@@ -38,10 +38,11 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigrationWrapper, '
batch_column: 'id',
sub_batch_size: 1,
pause_ms: pause_ms,
+ job_arguments: active_migration.job_arguments,
connection: connection)
.and_return(job_instance)
- expect(job_instance).to receive(:perform).with('id', 'other_id')
+ expect(job_instance).to receive(:perform).with(no_args)
perform
end
@@ -49,7 +50,7 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigrationWrapper, '
it 'updates the tracking record in the database' do
test_metrics = { 'my_metrics' => 'some value' }
- expect(job_instance).to receive(:perform).with('id', 'other_id')
+ expect(job_instance).to receive(:perform).with(no_args)
expect(job_instance).to receive(:batch_metrics).and_return(test_metrics)
freeze_time do
@@ -78,7 +79,7 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigrationWrapper, '
it 'increments attempts and updates other fields' do
updated_metrics = { 'updated_metrics' => 'some_value' }
- expect(job_instance).to receive(:perform).with('id', 'other_id')
+ expect(job_instance).to receive(:perform).with(no_args)
expect(job_instance).to receive(:batch_metrics).and_return(updated_metrics)
freeze_time do
@@ -97,7 +98,7 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigrationWrapper, '
context 'when the migration job does not raise an error' do
it 'marks the tracking record as succeeded' do
- expect(job_instance).to receive(:perform).with('id', 'other_id')
+ expect(job_instance).to receive(:perform).with(no_args)
freeze_time do
perform
@@ -110,7 +111,7 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigrationWrapper, '
end
it 'tracks metrics of the execution' do
- expect(job_instance).to receive(:perform).with('id', 'other_id')
+ expect(job_instance).to receive(:perform).with(no_args)
expect(metrics_tracker).to receive(:track).with(job_record)
perform
@@ -120,7 +121,7 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigrationWrapper, '
context 'when the migration job raises an error' do
shared_examples 'an error is raised' do |error_class|
it 'marks the tracking record as failed' do
- expect(job_instance).to receive(:perform).with('id', 'other_id').and_raise(error_class)
+ expect(job_instance).to receive(:perform).with(no_args).and_raise(error_class)
freeze_time do
expect { perform }.to raise_error(error_class)
@@ -133,7 +134,7 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigrationWrapper, '
end
it 'tracks metrics of the execution' do
- expect(job_instance).to receive(:perform).with('id', 'other_id').and_raise(error_class)
+ expect(job_instance).to receive(:perform).with(no_args).and_raise(error_class)
expect(metrics_tracker).to receive(:track).with(job_record)
expect { perform }.to raise_error(error_class)
@@ -147,6 +148,7 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigrationWrapper, '
context 'when the batched background migration does not inherit from BatchedMigrationJob' do
let(:job_class) { Class.new }
+ let(:job_instance) { job_class.new }
it 'runs the job with the correct arguments' do
expect(job_class).to receive(:new).with(no_args).and_return(job_instance)
diff --git a/spec/lib/gitlab/database/background_migration/health_status/indicators/autovacuum_active_on_table_spec.rb b/spec/lib/gitlab/database/background_migration/health_status/indicators/autovacuum_active_on_table_spec.rb
index 21204814f17..db4383a79d4 100644
--- a/spec/lib/gitlab/database/background_migration/health_status/indicators/autovacuum_active_on_table_spec.rb
+++ b/spec/lib/gitlab/database/background_migration/health_status/indicators/autovacuum_active_on_table_spec.rb
@@ -20,9 +20,9 @@ RSpec.describe Gitlab::Database::BackgroundMigration::HealthStatus::Indicators::
swapout_view_for_table(:postgres_autovacuum_activity)
end
- let(:context) { Gitlab::Database::BackgroundMigration::HealthStatus::Context.new(tables) }
let(:tables) { [table] }
let(:table) { 'users' }
+ let(:context) { Gitlab::Database::BackgroundMigration::HealthStatus::Context.new(connection, tables) }
context 'without autovacuum activity' do
it 'returns Normal signal' do
diff --git a/spec/lib/gitlab/database/background_migration/health_status/indicators/write_ahead_log_spec.rb b/spec/lib/gitlab/database/background_migration/health_status/indicators/write_ahead_log_spec.rb
new file mode 100644
index 00000000000..650f11e3cd5
--- /dev/null
+++ b/spec/lib/gitlab/database/background_migration/health_status/indicators/write_ahead_log_spec.rb
@@ -0,0 +1,61 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::BackgroundMigration::HealthStatus::Indicators::WriteAheadLog do
+ let(:connection) { Gitlab::Database.database_base_models[:main].connection }
+
+ around do |example|
+ Gitlab::Database::SharedModel.using_connection(connection) do
+ example.run
+ end
+ end
+
+ describe '#evaluate' do
+ let(:tables) { [table] }
+ let(:table) { 'users' }
+ let(:context) { Gitlab::Database::BackgroundMigration::HealthStatus::Context.new(connection, tables) }
+
+ subject(:evaluate) { described_class.new(context).evaluate }
+
+ it 'remembers the indicator class' do
+ expect(evaluate.indicator_class).to eq(described_class)
+ end
+
+ it 'returns NoSignal signal in case the feature flag is disabled' do
+ stub_feature_flags(batched_migrations_health_status_wal: false)
+
+ expect(evaluate).to be_a(Gitlab::Database::BackgroundMigration::HealthStatus::Signals::NotAvailable)
+ expect(evaluate.reason).to include('indicator disabled')
+ end
+
+ it 'returns NoSignal signal when WAL archive queue can not be calculated' do
+ expect(connection).to receive(:execute).and_return([{ 'pending_wal_count' => nil }])
+
+ expect(evaluate).to be_a(Gitlab::Database::BackgroundMigration::HealthStatus::Signals::NotAvailable)
+ expect(evaluate.reason).to include('WAL archive queue can not be calculated')
+ end
+
+ it 'uses primary database' do
+ expect(Gitlab::Database::LoadBalancing::Session.current).to receive(:use_primary).and_yield
+
+ evaluate
+ end
+
+ context 'when WAL archive queue size is below the limit' do
+ it 'returns Normal signal' do
+ expect(connection).to receive(:execute).and_return([{ 'pending_wal_count' => 1 }])
+ expect(evaluate).to be_a(Gitlab::Database::BackgroundMigration::HealthStatus::Signals::Normal)
+ expect(evaluate.reason).to include('WAL archive queue is within limit')
+ end
+ end
+
+ context 'when WAL archive queue size is above the limit' do
+ it 'returns Stop signal' do
+ expect(connection).to receive(:execute).and_return([{ 'pending_wal_count' => 420 }])
+ expect(evaluate).to be_a(Gitlab::Database::BackgroundMigration::HealthStatus::Signals::Stop)
+ expect(evaluate.reason).to include('WAL archive queue is too big')
+ end
+ end
+ end
+end
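
The WriteAheadLog indicator spec above pins three outcomes: NotAvailable when the pending WAL count cannot be read, Normal below some limit, and Stop above it. A tiny sketch of that decision; the exact limit is not shown in the spec (1 is below it, 420 above), so the constant here is an assumption.

# Illustrative only: maps a pending WAL archive count to one of the three
# outcomes exercised above. PENDING_WAL_LIMIT is an assumed value.
PENDING_WAL_LIMIT = 300

def wal_signal(pending_wal_count)
  return :not_available if pending_wal_count.nil? # queue size could not be calculated

  pending_wal_count > PENDING_WAL_LIMIT ? :stop : :normal
end

wal_signal(nil) # => :not_available
wal_signal(1)   # => :normal
wal_signal(420) # => :stop
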
diff --git a/spec/lib/gitlab/database/background_migration/health_status_spec.rb b/spec/lib/gitlab/database/background_migration/health_status_spec.rb
index 6d0430dcbbb..8bc04d80fa1 100644
--- a/spec/lib/gitlab/database/background_migration/health_status_spec.rb
+++ b/spec/lib/gitlab/database/background_migration/health_status_spec.rb
@@ -12,30 +12,47 @@ RSpec.describe Gitlab::Database::BackgroundMigration::HealthStatus do
end
describe '.evaluate' do
- subject(:evaluate) { described_class.evaluate(migration, indicator_class) }
+ subject(:evaluate) { described_class.evaluate(migration, [autovacuum_indicator_class]) }
let(:migration) { build(:batched_background_migration, :active) }
- let(:health_status) { 'Gitlab::Database::BackgroundMigration::HealthStatus' }
- let(:indicator_class) { class_double("#{health_status}::Indicators::AutovacuumActiveOnTable") }
- let(:indicator) { instance_double("#{health_status}::Indicators::AutovacuumActiveOnTable") }
+ let(:health_status) { Gitlab::Database::BackgroundMigration::HealthStatus }
+ let(:autovacuum_indicator_class) { health_status::Indicators::AutovacuumActiveOnTable }
+ let(:wal_indicator_class) { health_status::Indicators::WriteAheadLog }
+ let(:autovacuum_indicator) { instance_double(autovacuum_indicator_class) }
+ let(:wal_indicator) { instance_double(wal_indicator_class) }
before do
- allow(indicator_class).to receive(:new).with(migration.health_context).and_return(indicator)
+ allow(autovacuum_indicator_class).to receive(:new).with(migration.health_context).and_return(autovacuum_indicator)
end
- it 'returns a signal' do
+ context 'with default indicators' do
+ subject(:evaluate) { described_class.evaluate(migration) }
+
+ it 'returns a collection of signals' do
+ normal_signal = instance_double("#{health_status}::Signals::Normal", log_info?: false)
+ not_available_signal = instance_double("#{health_status}::Signals::NotAvailable", log_info?: false)
+
+ expect(autovacuum_indicator).to receive(:evaluate).and_return(normal_signal)
+ expect(wal_indicator_class).to receive(:new).with(migration.health_context).and_return(wal_indicator)
+ expect(wal_indicator).to receive(:evaluate).and_return(not_available_signal)
+
+ expect(evaluate).to contain_exactly(normal_signal, not_available_signal)
+ end
+ end
+
+ it 'returns a collection of signals' do
signal = instance_double("#{health_status}::Signals::Normal", log_info?: false)
- expect(indicator).to receive(:evaluate).and_return(signal)
+ expect(autovacuum_indicator).to receive(:evaluate).and_return(signal)
- expect(evaluate).to eq(signal)
+ expect(evaluate).to contain_exactly(signal)
end
it 'logs interesting signals' do
signal = instance_double("#{health_status}::Signals::Stop", log_info?: true)
- expect(indicator).to receive(:evaluate).and_return(signal)
+ expect(autovacuum_indicator).to receive(:evaluate).and_return(signal)
expect(described_class).to receive(:log_signal).with(signal, migration)
evaluate
@@ -44,7 +61,7 @@ RSpec.describe Gitlab::Database::BackgroundMigration::HealthStatus do
it 'does not log signals of no interest' do
signal = instance_double("#{health_status}::Signals::Normal", log_info?: false)
- expect(indicator).to receive(:evaluate).and_return(signal)
+ expect(autovacuum_indicator).to receive(:evaluate).and_return(signal)
expect(described_class).not_to receive(:log_signal)
evaluate
@@ -54,7 +71,7 @@ RSpec.describe Gitlab::Database::BackgroundMigration::HealthStatus do
let(:error) { RuntimeError.new('everything broken') }
before do
- expect(indicator).to receive(:evaluate).and_raise(error)
+ expect(autovacuum_indicator).to receive(:evaluate).and_raise(error)
end
it 'does not fail' do
@@ -62,8 +79,10 @@ RSpec.describe Gitlab::Database::BackgroundMigration::HealthStatus do
end
it 'returns Unknown signal' do
- expect(evaluate).to be_an_instance_of(Gitlab::Database::BackgroundMigration::HealthStatus::Signals::Unknown)
- expect(evaluate.reason).to eq("unexpected error: everything broken (RuntimeError)")
+ signal = evaluate.first
+
+ expect(signal).to be_an_instance_of(Gitlab::Database::BackgroundMigration::HealthStatus::Signals::Unknown)
+ expect(signal.reason).to eq("unexpected error: everything broken (RuntimeError)")
end
it 'reports the exception to error tracking' do
diff --git a/spec/lib/gitlab/database/bulk_update_spec.rb b/spec/lib/gitlab/database/bulk_update_spec.rb
index 08b4d50f83b..fa519cffd6b 100644
--- a/spec/lib/gitlab/database/bulk_update_spec.rb
+++ b/spec/lib/gitlab/database/bulk_update_spec.rb
@@ -91,7 +91,8 @@ RSpec.describe Gitlab::Database::BulkUpdate do
.to eq(['MR a', 'Issue a', 'Issue b'])
end
- context 'validates prepared_statements support', :reestablished_active_record_base do
+ context 'validates prepared_statements support', :reestablished_active_record_base,
+ :suppress_gitlab_schemas_validate_connection do
using RSpec::Parameterized::TableSyntax
where(:prepared_statements) do
diff --git a/spec/lib/gitlab/database/load_balancing/load_balancer_spec.rb b/spec/lib/gitlab/database/load_balancing/load_balancer_spec.rb
index 34eb64997c1..9c09253b24c 100644
--- a/spec/lib/gitlab/database/load_balancing/load_balancer_spec.rb
+++ b/spec/lib/gitlab/database/load_balancing/load_balancer_spec.rb
@@ -358,7 +358,11 @@ RSpec.describe Gitlab::Database::LoadBalancing::LoadBalancer, :request_store do
end
it 'returns true for deeply wrapped/nested errors' do
- top = twice_wrapped_exception(ActionView::Template::Error, ActiveRecord::StatementInvalid, ActiveRecord::ConnectionNotEstablished)
+ top = twice_wrapped_exception(
+ ActionView::Template::Error,
+ ActiveRecord::StatementInvalid,
+ ActiveRecord::ConnectionNotEstablished
+ )
expect(lb.connection_error?(top)).to eq(true)
end
@@ -404,7 +408,7 @@ RSpec.describe Gitlab::Database::LoadBalancing::LoadBalancer, :request_store do
end
describe '#select_up_to_date_host' do
- let(:location) { 'AB/12345'}
+ let(:location) { 'AB/12345' }
let(:hosts) { lb.host_list.hosts }
let(:set_host) { request_cache[described_class::CACHE_KEY] }
@@ -455,7 +459,7 @@ RSpec.describe Gitlab::Database::LoadBalancing::LoadBalancer, :request_store do
end
it 'does not modify connection class pool' do
- expect { with_replica_pool(5) { } }.not_to change { ActiveRecord::Base.connection_pool }
+ expect { with_replica_pool(5) {} }.not_to change { ActiveRecord::Base.connection_pool }
end
def with_replica_pool(*args)
diff --git a/spec/lib/gitlab/database/load_balancing/rack_middleware_spec.rb b/spec/lib/gitlab/database/load_balancing/rack_middleware_spec.rb
index b768d4ecea3..a1c141af537 100644
--- a/spec/lib/gitlab/database/load_balancing/rack_middleware_spec.rb
+++ b/spec/lib/gitlab/database/load_balancing/rack_middleware_spec.rb
@@ -30,6 +30,8 @@ RSpec.describe Gitlab::Database::LoadBalancing::RackMiddleware, :redis do
expect(app).to receive(:call).with(env).and_return(10)
+ allow(ActiveSupport::Notifications).to receive(:instrument).and_call_original
+
expect(ActiveSupport::Notifications)
.to receive(:instrument)
.with('web_transaction_completed.load_balancing')
diff --git a/spec/lib/gitlab/database/load_balancing/session_spec.rb b/spec/lib/gitlab/database/load_balancing/session_spec.rb
index 74512f76fd4..05b44579c62 100644
--- a/spec/lib/gitlab/database/load_balancing/session_spec.rb
+++ b/spec/lib/gitlab/database/load_balancing/session_spec.rb
@@ -132,7 +132,11 @@ RSpec.describe Gitlab::Database::LoadBalancing::Session do
it 'does not prevent using primary if an exception is raised' do
instance = described_class.new
- instance.ignore_writes { raise ArgumentError } rescue ArgumentError
+ begin
+ instance.ignore_writes { raise ArgumentError }
+ rescue ArgumentError
+ nil
+ end
instance.write!
expect(instance).to be_using_primary
diff --git a/spec/lib/gitlab/database/load_balancing/sidekiq_server_middleware_spec.rb b/spec/lib/gitlab/database/load_balancing/sidekiq_server_middleware_spec.rb
index 31be3963565..8053bd57bba 100644
--- a/spec/lib/gitlab/database/load_balancing/sidekiq_server_middleware_spec.rb
+++ b/spec/lib/gitlab/database/load_balancing/sidekiq_server_middleware_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe Gitlab::Database::LoadBalancing::SidekiqServerMiddleware, :clean_gitlab_redis_queues do
let(:middleware) { described_class.new }
let(:worker) { worker_class.new }
- let(:location) {'0/D525E3A8' }
+ let(:location) { '0/D525E3A8' }
let(:wal_locations) { { Gitlab::Database::MAIN_DATABASE_NAME.to_sym => location } }
let(:job) { { "retry" => 3, "job_id" => "a180b47c-3fd6-41b8-81e9-34da61c3400e", 'wal_locations' => wal_locations } }
diff --git a/spec/lib/gitlab/database/load_balancing/sticking_spec.rb b/spec/lib/gitlab/database/load_balancing/sticking_spec.rb
index f3139bb1b4f..2ffb2c32c32 100644
--- a/spec/lib/gitlab/database/load_balancing/sticking_spec.rb
+++ b/spec/lib/gitlab/database/load_balancing/sticking_spec.rb
@@ -77,6 +77,8 @@ RSpec.describe Gitlab::Database::LoadBalancing::Sticking, :redis do
let(:last_write_location) { 'foo' }
before do
+ allow(ActiveSupport::Notifications).to receive(:instrument).and_call_original
+
allow(sticking)
.to receive(:last_write_location_for)
.with(:user, 42)
diff --git a/spec/lib/gitlab/database/load_balancing_spec.rb b/spec/lib/gitlab/database/load_balancing_spec.rb
index f320fe0276f..76dfaa74ae6 100644
--- a/spec/lib/gitlab/database/load_balancing_spec.rb
+++ b/spec/lib/gitlab/database/load_balancing_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Database::LoadBalancing do
+RSpec.describe Gitlab::Database::LoadBalancing, :suppress_gitlab_schemas_validate_connection do
describe '.base_models' do
it 'returns the models to apply load balancing to' do
models = described_class.base_models
diff --git a/spec/lib/gitlab/database/lock_writes_manager_spec.rb b/spec/lib/gitlab/database/lock_writes_manager_spec.rb
new file mode 100644
index 00000000000..eb527d492cf
--- /dev/null
+++ b/spec/lib/gitlab/database/lock_writes_manager_spec.rb
@@ -0,0 +1,123 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::LockWritesManager do
+ let(:connection) { ApplicationRecord.connection }
+ let(:test_table) { '_test_table' }
+ let(:logger) { instance_double(Logger) }
+
+ subject(:lock_writes_manager) do
+ described_class.new(
+ table_name: test_table,
+ connection: connection,
+ database_name: 'main',
+ logger: logger
+ )
+ end
+
+ before do
+ allow(logger).to receive(:info)
+
+ connection.execute(<<~SQL)
+ CREATE TABLE #{test_table} (id integer NOT NULL, value integer NOT NULL DEFAULT 0);
+
+ INSERT INTO #{test_table} (id, value)
+ VALUES (1, 1), (2, 2), (3, 3)
+ SQL
+ end
+
+ describe '#lock_writes' do
+ it 'prevents any writes on the table' do
+ subject.lock_writes
+
+ expect do
+ connection.execute("delete from #{test_table}")
+ end.to raise_error(ActiveRecord::StatementInvalid, /Table: "#{test_table}" is write protected/)
+ end
+
+ it 'prevents truncating the table' do
+ subject.lock_writes
+
+ expect do
+ connection.execute("truncate #{test_table}")
+ end.to raise_error(ActiveRecord::StatementInvalid, /Table: "#{test_table}" is write protected/)
+ end
+
+ it 'adds 3 write-protection triggers to the table' do
+ expect do
+ subject.lock_writes
+ end.to change {
+ number_of_triggers_on(connection, test_table)
+ }.by(3) # Triggers to block INSERT / UPDATE / DELETE
+ # Triggers on TRUNCATE are not added to the information_schema.triggers
+ # See https://www.postgresql.org/message-id/16934.1568989957%40sss.pgh.pa.us
+ end
+
+ it 'logs the write locking' do
+ expect(logger).to receive(:info).with("Database: 'main', Table: '_test_table': Lock Writes")
+
+ subject.lock_writes
+ end
+
+ it 'retries if it receives a statement_timeout a few times' do
+ error_message = "PG::QueryCanceled: ERROR: canceling statement due to statement timeout"
+ call_count = 0
+ allow(connection).to receive(:execute) do |statement|
+ if statement.include?("CREATE TRIGGER")
+ call_count += 1
+ raise(ActiveRecord::QueryCanceled, error_message) if call_count.even?
+ end
+ end
+ subject.lock_writes
+ end
+
+ it 'raises the exception if it happened many times' do
+ error_message = "PG::QueryCanceled: ERROR: canceling statement due to statement timeout"
+ allow(connection).to receive(:execute) do |statement|
+ if statement.include?("CREATE TRIGGER")
+ raise(ActiveRecord::QueryCanceled, error_message)
+ end
+ end
+
+ expect do
+ subject.lock_writes
+ end.to raise_error(ActiveRecord::QueryCanceled)
+ end
+ end
+
+ describe '#unlock_writes' do
+ before do
+ subject.lock_writes
+ end
+
+ it 'allows writing on the table again' do
+ subject.unlock_writes
+
+ expect do
+ connection.execute("delete from #{test_table}")
+ end.not_to raise_error
+ end
+
+ it 'removes the write protection triggers from the gitlab_main tables on the ci database' do
+ expect do
+ subject.unlock_writes
+ end.to change {
+ number_of_triggers_on(connection, test_table)
+ }.by(-3) # Triggers to block INSERT / UPDATE / DELETE
+ # Triggers on TRUNCATE are not added to the information_schema.triggers
+ # See https://www.postgresql.org/message-id/16934.1568989957%40sss.pgh.pa.us
+ end
+
+ it 'logs the write unlocking' do
+ expect(logger).to receive(:info).with("Database: 'main', Table: '_test_table': Allow Writes")
+
+ subject.unlock_writes
+ end
+ end
+
+ def number_of_triggers_on(connection, table_name)
+ connection
+ .select_value("SELECT count(*) FROM information_schema.triggers WHERE event_object_table=$1", nil, [table_name])
+ end
+end
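
The new spec above exercises trigger-based write protection on a throwaway table. As a rough sketch of the mechanism under test, a manager could install a single statement-level trigger covering INSERT, UPDATE, DELETE and TRUNCATE, which shows up as three rows in information_schema.triggers and so matches the by(3) expectation. The function and trigger names below are hypothetical illustrations, not GitLab's actual implementation.

# Hypothetical sketch of trigger-based write locking, for illustration only.
# Assumes a raising PL/pgSQL function whose message matches the
# "is write protected" assertion in the spec above.
class SimpleLockWritesManager
  def initialize(table_name:, connection:)
    @table_name = table_name
    @connection = connection
  end

  def lock_writes
    @connection.execute(<<~SQL)
      CREATE OR REPLACE FUNCTION prevent_write() RETURNS TRIGGER AS $$
      BEGIN
        RAISE EXCEPTION 'Table: "%" is write protected', TG_TABLE_NAME;
      END
      $$ LANGUAGE plpgsql;

      -- One trigger, three rows in information_schema.triggers (TRUNCATE is not listed there)
      CREATE TRIGGER #{@table_name}_prevent_write
        BEFORE INSERT OR UPDATE OR DELETE OR TRUNCATE ON #{@table_name}
        FOR EACH STATEMENT EXECUTE FUNCTION prevent_write();
    SQL
  end

  def unlock_writes
    @connection.execute("DROP TRIGGER IF EXISTS #{@table_name}_prevent_write ON #{@table_name}")
  end
end
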
diff --git a/spec/lib/gitlab/database/loose_foreign_keys_spec.rb b/spec/lib/gitlab/database/loose_foreign_keys_spec.rb
index 87a3e0f81e4..ff99f681b0c 100644
--- a/spec/lib/gitlab/database/loose_foreign_keys_spec.rb
+++ b/spec/lib/gitlab/database/loose_foreign_keys_spec.rb
@@ -84,4 +84,32 @@ RSpec.describe Gitlab::Database::LooseForeignKeys do
end
end
end
+
+ describe '.definitions' do
+ subject(:definitions) { described_class.definitions }
+
+ it 'contains at least all parent tables that have triggers' do
+ all_definition_parent_tables = definitions.map { |d| d.to_table }.to_set
+
+ triggers_query = <<~SQL
+ SELECT event_object_table, trigger_name
+ FROM information_schema.triggers
+ WHERE trigger_name LIKE '%_loose_fk_trigger'
+ GROUP BY event_object_table, trigger_name
+ SQL
+
+ all_triggers = ApplicationRecord.connection.execute(triggers_query)
+
+ all_triggers.each do |trigger|
+ table = trigger['event_object_table']
+ trigger_name = trigger['trigger_name']
+ error_message = <<~END
+ Missing a loose foreign key definition for parent table: #{table} with trigger: #{trigger_name}.
+ Loose foreign key definitions must be added before triggers are added and triggers must be removed before removing the loose foreign key definition.
+ Read more at https://docs.gitlab.com/ee/development/database/loose_foreign_keys.html.
+ END
+ expect(all_definition_parent_tables).to include(table), error_message
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/database/migration_helpers/restrict_gitlab_schema_spec.rb b/spec/lib/gitlab/database/migration_helpers/restrict_gitlab_schema_spec.rb
index 1009ec354c3..e43cfe0814e 100644
--- a/spec/lib/gitlab/database/migration_helpers/restrict_gitlab_schema_spec.rb
+++ b/spec/lib/gitlab/database/migration_helpers/restrict_gitlab_schema_spec.rb
@@ -5,6 +5,13 @@ require 'spec_helper'
RSpec.describe Gitlab::Database::MigrationHelpers::RestrictGitlabSchema, query_analyzers: false, stub_feature_flags: false do
let(:schema_class) { Class.new(Gitlab::Database::Migration[1.0]).include(described_class) }
+ # We keep only the GitlabSchemasValidateConnection analyzer running
+ around do |example|
+ Gitlab::Database::QueryAnalyzers::GitlabSchemasValidateConnection.with_suppressed(false) do
+ example.run
+ end
+ end
+
describe '#restrict_gitlab_migration' do
it 'invalid schema raises exception' do
expect { schema_class.restrict_gitlab_migration gitlab_schema: :gitlab_non_exisiting }
diff --git a/spec/lib/gitlab/database/migration_helpers/v2_spec.rb b/spec/lib/gitlab/database/migration_helpers/v2_spec.rb
index 5c054795697..2055dc33d48 100644
--- a/spec/lib/gitlab/database/migration_helpers/v2_spec.rb
+++ b/spec/lib/gitlab/database/migration_helpers/v2_spec.rb
@@ -266,7 +266,7 @@ RSpec.describe Gitlab::Database::MigrationHelpers::V2 do
let(:env) { { 'DISABLE_LOCK_RETRIES' => 'true' } }
it 'sets the migration class name in the logs' do
- model.with_lock_retries(env: env, logger: in_memory_logger) { }
+ model.with_lock_retries(env: env, logger: in_memory_logger) {}
buffer.rewind
expect(buffer.read).to include("\"class\":\"#{model.class}\"")
@@ -280,7 +280,7 @@ RSpec.describe Gitlab::Database::MigrationHelpers::V2 do
expect(Gitlab::Database::WithLockRetries).to receive(:new).and_return(with_lock_retries)
expect(with_lock_retries).to receive(:run).with(raise_on_exhaustion: raise_on_exhaustion)
- model.with_lock_retries(env: env, logger: in_memory_logger, raise_on_exhaustion: raise_on_exhaustion) { }
+ model.with_lock_retries(env: env, logger: in_memory_logger, raise_on_exhaustion: raise_on_exhaustion) {}
end
end
@@ -289,7 +289,7 @@ RSpec.describe Gitlab::Database::MigrationHelpers::V2 do
expect(Gitlab::Database::WithLockRetries).to receive(:new).and_return(with_lock_retries)
expect(with_lock_retries).to receive(:run).with(raise_on_exhaustion: false)
- model.with_lock_retries(env: env, logger: in_memory_logger) { }
+ model.with_lock_retries(env: env, logger: in_memory_logger) {}
end
it 'defaults to disallowing subtransactions' do
@@ -297,7 +297,7 @@ RSpec.describe Gitlab::Database::MigrationHelpers::V2 do
expect(Gitlab::Database::WithLockRetries).to receive(:new).with(hash_including(allow_savepoints: false)).and_return(with_lock_retries)
expect(with_lock_retries).to receive(:run).with(raise_on_exhaustion: false)
- model.with_lock_retries(env: env, logger: in_memory_logger) { }
+ model.with_lock_retries(env: env, logger: in_memory_logger) {}
end
context 'when in transaction' do
@@ -323,7 +323,7 @@ RSpec.describe Gitlab::Database::MigrationHelpers::V2 do
end
it 'raises an error' do
- expect { model.with_lock_retries(env: env, logger: in_memory_logger) { } }.to raise_error /can not be run inside an already open transaction/
+ expect { model.with_lock_retries(env: env, logger: in_memory_logger) {} }.to raise_error /can not be run inside an already open transaction/
end
end
end
diff --git a/spec/lib/gitlab/database/migration_helpers_spec.rb b/spec/lib/gitlab/database/migration_helpers_spec.rb
index 3ccc3a17862..dd5ad40d8ef 100644
--- a/spec/lib/gitlab/database/migration_helpers_spec.rb
+++ b/spec/lib/gitlab/database/migration_helpers_spec.rb
@@ -15,7 +15,7 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
end
describe 'overridden dynamic model helpers' do
- let(:test_table) { '__test_batching_table' }
+ let(:test_table) { '_test_batching_table' }
before do
model.connection.execute(<<~SQL)
@@ -1022,6 +1022,40 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
expect(Project.sum(:star_count)).to eq(2 * Project.count)
end
end
+
+ context 'when the table is write-locked' do
+ let(:test_table) { '_test_table' }
+ let(:lock_writes_manager) do
+ Gitlab::Database::LockWritesManager.new(
+ table_name: test_table,
+ connection: model.connection,
+ database_name: 'main'
+ )
+ end
+
+ before do
+ model.connection.execute(<<~SQL)
+ CREATE TABLE #{test_table} (id integer NOT NULL, value integer NOT NULL DEFAULT 0);
+
+ INSERT INTO #{test_table} (id, value)
+ VALUES (1, 1), (2, 2), (3, 3)
+ SQL
+
+ lock_writes_manager.lock_writes
+ end
+
+ it 'disables the write-lock trigger function' do
+ expect do
+ model.update_column_in_batches(test_table, :value, Arel.sql('1+1'), disable_lock_writes: true)
+ end.not_to raise_error
+ end
+
+ it 'raises an error if it does not disable the trigger function' do
+ expect do
+ model.update_column_in_batches(test_table, :value, Arel.sql('1+1'), disable_lock_writes: false)
+ end.to raise_error(ActiveRecord::StatementInvalid, /Table: "#{test_table}" is write protected/)
+ end
+ end
end
context 'when running inside the transaction' do
@@ -1080,6 +1114,8 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
end
it 'renames a column concurrently' do
+ expect(Gitlab::Database::QueryAnalyzers::GitlabSchemasValidateConnection).to receive(:with_suppressed).and_yield
+
expect(model).to receive(:check_trigger_permissions!).with(:users)
expect(model).to receive(:install_rename_triggers)
@@ -1112,6 +1148,7 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
let(:connection) { ActiveRecord::Migration.connection }
before do
+ expect(Gitlab::Database::QueryAnalyzers::GitlabSchemasValidateConnection).to receive(:with_suppressed).and_yield
expect(Gitlab::Database::UnidirectionalCopyTrigger).to receive(:on_table)
.with(:users, connection: connection).and_return(copy_trigger)
end
@@ -1119,6 +1156,7 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
it 'copies the value to the new column using the type_cast_function', :aggregate_failures do
expect(model).to receive(:copy_indexes).with(:users, :id, :new)
expect(model).to receive(:add_not_null_constraint).with(:users, :new)
+ expect(model).to receive(:execute).with("SELECT set_config('lock_writes.users', 'false', true)")
expect(model).to receive(:execute).with("UPDATE \"users\" SET \"new\" = cast_to_jsonb_with_default(\"users\".\"id\") WHERE \"users\".\"id\" >= #{user.id}")
expect(copy_trigger).to receive(:create).with(:id, :new, trigger_name: nil)
@@ -1165,6 +1203,8 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
end
it 'copies the default to the new column' do
+ expect(Gitlab::Database::QueryAnalyzers::GitlabSchemasValidateConnection).to receive(:with_suppressed).and_yield
+
expect(model).to receive(:change_column_default)
.with(:users, :new, old_column.default)
@@ -1176,6 +1216,34 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
end
end
+ context 'when the table in the other database is write-locked' do
+ let(:test_table) { '_test_table' }
+ let(:lock_writes_manager) do
+ Gitlab::Database::LockWritesManager.new(
+ table_name: test_table,
+ connection: model.connection,
+ database_name: 'main'
+ )
+ end
+
+ before do
+ model.connection.execute(<<~SQL)
+ CREATE TABLE #{test_table} (id integer NOT NULL, value integer NOT NULL DEFAULT 0);
+
+ INSERT INTO #{test_table} (id, value)
+ VALUES (1, 1), (2, 2), (3, 3)
+ SQL
+
+ lock_writes_manager.lock_writes
+ end
+
+ it 'does not raise an error when renaming the column' do
+ expect do
+ model.rename_column_concurrently(test_table, :value, :new_value)
+ end.not_to raise_error
+ end
+ end
+
context 'when the column to be renamed does not exist' do
before do
allow(model).to receive(:columns).and_return([])
@@ -1246,6 +1314,8 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
end
it 'reverses the operations of cleanup_concurrent_column_rename' do
+ expect(Gitlab::Database::QueryAnalyzers::GitlabSchemasValidateConnection).to receive(:with_suppressed).and_yield
+
expect(model).to receive(:check_trigger_permissions!).with(:users)
expect(model).to receive(:install_rename_triggers)
@@ -1302,6 +1372,8 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
end
it 'copies the default to the old column' do
+ expect(Gitlab::Database::QueryAnalyzers::GitlabSchemasValidateConnection).to receive(:with_suppressed).and_yield
+
expect(model).to receive(:change_column_default)
.with(:users, :old, new_column.default)
@@ -2438,7 +2510,7 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
let(:env) { { 'DISABLE_LOCK_RETRIES' => 'true' } }
it 'sets the migration class name in the logs' do
- model.with_lock_retries(env: env, logger: in_memory_logger) { }
+ model.with_lock_retries(env: env, logger: in_memory_logger) {}
buffer.rewind
expect(buffer.read).to include("\"class\":\"#{model.class}\"")
@@ -2452,7 +2524,7 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
expect(Gitlab::Database::WithLockRetries).to receive(:new).and_return(with_lock_retries)
expect(with_lock_retries).to receive(:run).with(raise_on_exhaustion: raise_on_exhaustion)
- model.with_lock_retries(env: env, logger: in_memory_logger, raise_on_exhaustion: raise_on_exhaustion) { }
+ model.with_lock_retries(env: env, logger: in_memory_logger, raise_on_exhaustion: raise_on_exhaustion) {}
end
end
@@ -2461,7 +2533,7 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
expect(Gitlab::Database::WithLockRetries).to receive(:new).and_return(with_lock_retries)
expect(with_lock_retries).to receive(:run).with(raise_on_exhaustion: false)
- model.with_lock_retries(env: env, logger: in_memory_logger) { }
+ model.with_lock_retries(env: env, logger: in_memory_logger) {}
end
it 'defaults to allowing subtransactions' do
@@ -2470,7 +2542,7 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
expect(Gitlab::Database::WithLockRetries).to receive(:new).with(hash_including(allow_savepoints: true)).and_return(with_lock_retries)
expect(with_lock_retries).to receive(:run).with(raise_on_exhaustion: false)
- model.with_lock_retries(env: env, logger: in_memory_logger) { }
+ model.with_lock_retries(env: env, logger: in_memory_logger) {}
end
end
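
The new contexts in this file rely on update_column_in_batches accepting a disable_lock_writes option and on the helper issuing SELECT set_config('lock_writes.<table>', 'false', true) before touching a write-locked table. A minimal sketch of that bypass, assuming the trigger function consults the lock_writes.<table> setting; the helper name is hypothetical.

# Illustrative sketch only: temporarily disable the write-lock trigger for the
# current transaction before running the given block. The third set_config
# argument (true) scopes the setting to the transaction, so this is expected
# to run inside one.
def with_lock_writes_disabled(connection, table_name)
  connection.execute(
    "SELECT set_config('lock_writes.#{table_name}', 'false', true)"
  )
  yield
end

# Hypothetical usage:
# with_lock_writes_disabled(connection, :users) do
#   update_column_in_batches(:users, :value, 1)
# end
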
diff --git a/spec/lib/gitlab/database/migrations/background_migration_helpers_spec.rb b/spec/lib/gitlab/database/migrations/background_migration_helpers_spec.rb
index c423340a572..f21f1ac5e52 100644
--- a/spec/lib/gitlab/database/migrations/background_migration_helpers_spec.rb
+++ b/spec/lib/gitlab/database/migrations/background_migration_helpers_spec.rb
@@ -37,12 +37,6 @@ RSpec.describe Gitlab::Database::Migrations::BackgroundMigrationHelpers do
freeze_time { example.run }
end
- before do
- User.class_eval do
- include EachBatch
- end
- end
-
it 'returns the final expected delay' do
Sidekiq::Testing.fake! do
final_delay = model.queue_background_migration_jobs_by_range_at_intervals(User, 'FooJob', 10.minutes, batch_size: 2)
diff --git a/spec/lib/gitlab/database/migrations/batched_background_migration_helpers_spec.rb b/spec/lib/gitlab/database/migrations/batched_background_migration_helpers_spec.rb
index 5bfb2516ba1..a2f6e6b43ed 100644
--- a/spec/lib/gitlab/database/migrations/batched_background_migration_helpers_spec.rb
+++ b/spec/lib/gitlab/database/migrations/batched_background_migration_helpers_spec.rb
@@ -15,12 +15,25 @@ RSpec.describe Gitlab::Database::Migrations::BatchedBackgroundMigrationHelpers d
describe '#queue_batched_background_migration' do
let(:pgclass_info) { instance_double('Gitlab::Database::PgClass', cardinality_estimate: 42) }
+ let(:job_class) do
+ Class.new(Gitlab::BackgroundMigration::BatchedMigrationJob) do
+ def self.name
+ 'MyJobClass'
+ end
+ end
+ end
before do
allow(Gitlab::Database::PgClass).to receive(:for_table).and_call_original
expect(Gitlab::Database::QueryAnalyzers::RestrictAllowedSchemas).to receive(:require_dml_mode!)
allow(migration).to receive(:transaction_open?).and_return(false)
+
+ stub_const("Gitlab::Database::BackgroundMigration::BatchedMigration::JOB_CLASS_MODULE", '')
+ allow_next_instance_of(Gitlab::Database::BackgroundMigration::BatchedMigration) do |batched_migration|
+ allow(batched_migration).to receive(:job_class)
+ .and_return(job_class)
+ end
end
context 'when such migration already exists' do
@@ -42,7 +55,7 @@ RSpec.describe Gitlab::Database::Migrations::BatchedBackgroundMigrationHelpers d
expect do
migration.queue_batched_background_migration(
- 'MyJobClass',
+ job_class.name,
:projects,
:id,
[:id], [:id_convert_to_bigint],
@@ -62,7 +75,7 @@ RSpec.describe Gitlab::Database::Migrations::BatchedBackgroundMigrationHelpers d
expect do
migration.queue_batched_background_migration(
- 'MyJobClass',
+ job_class.name,
:projects,
:id,
job_interval: 5.minutes,
@@ -97,7 +110,7 @@ RSpec.describe Gitlab::Database::Migrations::BatchedBackgroundMigrationHelpers d
it 'sets the job interval to the minimum value' do
expect do
- migration.queue_batched_background_migration('MyJobClass', :events, :id, job_interval: minimum_delay - 1.minute)
+ migration.queue_batched_background_migration(job_class.name, :events, :id, job_interval: minimum_delay - 1.minute)
end.to change { Gitlab::Database::BackgroundMigration::BatchedMigration.count }.by(1)
created_migration = Gitlab::Database::BackgroundMigration::BatchedMigration.last
@@ -107,26 +120,76 @@ RSpec.describe Gitlab::Database::Migrations::BatchedBackgroundMigrationHelpers d
end
context 'when additional arguments are passed to the method' do
- it 'saves the arguments on the database record' do
- expect do
- migration.queue_batched_background_migration(
- 'MyJobClass',
- :projects,
- :id,
- 'my',
- 'arguments',
- job_interval: 5.minutes,
- batch_max_value: 1000)
- end.to change { Gitlab::Database::BackgroundMigration::BatchedMigration.count }.by(1)
+ context 'when the job class provides job_arguments_count' do
+ context 'when defined job arguments for the job class does not match provided arguments' do
+ it 'raises an error' do
+ expect do
+ migration.queue_batched_background_migration(
+ job_class.name,
+ :projects,
+ :id,
+ 'my',
+ 'arguments',
+ job_interval: 2.minutes)
+ end.to raise_error(RuntimeError, /Wrong number of job arguments for MyJobClass \(given 2, expected 0\)/)
+ end
+ end
- expect(Gitlab::Database::BackgroundMigration::BatchedMigration.last).to have_attributes(
- job_class_name: 'MyJobClass',
- table_name: 'projects',
- column_name: 'id',
- interval: 300,
- min_value: 1,
- max_value: 1000,
- job_arguments: %w[my arguments])
+ context 'when defined job arguments for the job class match provided arguments' do
+ let(:job_class) do
+ Class.new(Gitlab::BackgroundMigration::BatchedMigrationJob) do
+ def self.name
+ 'MyJobClass'
+ end
+
+ job_arguments :foo, :bar
+ end
+ end
+
+ it 'saves the arguments on the database record' do
+ expect do
+ migration.queue_batched_background_migration(
+ job_class.name,
+ :projects,
+ :id,
+ 'my',
+ 'arguments',
+ job_interval: 5.minutes,
+ batch_max_value: 1000)
+ end.to change { Gitlab::Database::BackgroundMigration::BatchedMigration.count }.by(1)
+
+ expect(Gitlab::Database::BackgroundMigration::BatchedMigration.last).to have_attributes(
+ job_class_name: 'MyJobClass',
+ table_name: 'projects',
+ column_name: 'id',
+ interval: 300,
+ min_value: 1,
+ max_value: 1000,
+ job_arguments: %w[my arguments])
+ end
+ end
+ end
+
+ context 'when the job class does not provide job_arguments_count' do
+ let(:job_class) do
+ Class.new do
+ def self.name
+ 'MyJobClass'
+ end
+ end
+ end
+
+ it 'does not raise an error' do
+ expect do
+ migration.queue_batched_background_migration(
+ job_class.name,
+ :projects,
+ :id,
+ 'my',
+ 'arguments',
+ job_interval: 2.minutes)
+ end.not_to raise_error
+ end
end
end
@@ -138,7 +201,7 @@ RSpec.describe Gitlab::Database::Migrations::BatchedBackgroundMigrationHelpers d
it 'creates the record with the current max value' do
expect do
- migration.queue_batched_background_migration('MyJobClass', :events, :id, job_interval: 5.minutes)
+ migration.queue_batched_background_migration(job_class.name, :events, :id, job_interval: 5.minutes)
end.to change { Gitlab::Database::BackgroundMigration::BatchedMigration.count }.by(1)
created_migration = Gitlab::Database::BackgroundMigration::BatchedMigration.last
@@ -148,7 +211,7 @@ RSpec.describe Gitlab::Database::Migrations::BatchedBackgroundMigrationHelpers d
it 'creates the record with an active status' do
expect do
- migration.queue_batched_background_migration('MyJobClass', :events, :id, job_interval: 5.minutes)
+ migration.queue_batched_background_migration(job_class.name, :events, :id, job_interval: 5.minutes)
end.to change { Gitlab::Database::BackgroundMigration::BatchedMigration.count }.by(1)
expect(Gitlab::Database::BackgroundMigration::BatchedMigration.last).to be_active
@@ -158,7 +221,7 @@ RSpec.describe Gitlab::Database::Migrations::BatchedBackgroundMigrationHelpers d
context 'when the database is empty' do
it 'sets the max value to the min value' do
expect do
- migration.queue_batched_background_migration('MyJobClass', :events, :id, job_interval: 5.minutes)
+ migration.queue_batched_background_migration(job_class.name, :events, :id, job_interval: 5.minutes)
end.to change { Gitlab::Database::BackgroundMigration::BatchedMigration.count }.by(1)
created_migration = Gitlab::Database::BackgroundMigration::BatchedMigration.last
@@ -168,7 +231,7 @@ RSpec.describe Gitlab::Database::Migrations::BatchedBackgroundMigrationHelpers d
it 'creates the record with a finished status' do
expect do
- migration.queue_batched_background_migration('MyJobClass', :projects, :id, job_interval: 5.minutes)
+ migration.queue_batched_background_migration(job_class.name, :projects, :id, job_interval: 5.minutes)
end.to change { Gitlab::Database::BackgroundMigration::BatchedMigration.count }.by(1)
expect(Gitlab::Database::BackgroundMigration::BatchedMigration.last).to be_finished
@@ -181,7 +244,7 @@ RSpec.describe Gitlab::Database::Migrations::BatchedBackgroundMigrationHelpers d
expect(migration).to receive(:gitlab_schema_from_context).and_return(:gitlab_ci)
expect do
- migration.queue_batched_background_migration('MyJobClass', :events, :id, job_interval: 5.minutes)
+ migration.queue_batched_background_migration(job_class.name, :events, :id, job_interval: 5.minutes)
end.to change { Gitlab::Database::BackgroundMigration::BatchedMigration.count }.by(1)
created_migration = Gitlab::Database::BackgroundMigration::BatchedMigration.last
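
The reworked examples above assume job classes derived from Gitlab::BackgroundMigration::BatchedMigrationJob that declare their expected arguments with the job_arguments macro, so queue_batched_background_migration can validate the argument count. A hypothetical job and queueing call shaped after those expectations; the class name, columns and each_sub_batch usage are illustrative assumptions.

# Hypothetical batched background migration job. job_arguments defines reader
# methods for the extra positional arguments passed when queueing.
class BackfillExampleColumn < Gitlab::BackgroundMigration::BatchedMigrationJob
  job_arguments :source_column, :target_column

  def perform
    each_sub_batch do |sub_batch|
      # sub_batch is an ActiveRecord::Relation limited to the current sub-batch
      sub_batch.update_all("#{target_column} = #{source_column}")
    end
  end
end

# Hypothetical usage in a post-deployment migration; the two extra positional
# arguments must match job_arguments (given 2, expected 2):
# queue_batched_background_migration(
#   'BackfillExampleColumn', :projects, :id,
#   :source_column, :target_column,
#   job_interval: 2.minutes
# )
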
diff --git a/spec/lib/gitlab/database/migrations/instrumentation_spec.rb b/spec/lib/gitlab/database/migrations/instrumentation_spec.rb
index c31244060ec..3540a120b8f 100644
--- a/spec/lib/gitlab/database/migrations/instrumentation_spec.rb
+++ b/spec/lib/gitlab/database/migrations/instrumentation_spec.rb
@@ -122,7 +122,11 @@ RSpec.describe Gitlab::Database::Migrations::Instrumentation do
it 'records observations for all migrations' do
subject.observe(version: migration_version, name: migration_name, connection: connection) {}
- subject.observe(version: migration_version_2, name: migration_name_2, connection: connection) { raise 'something went wrong' } rescue nil
+ begin
+ subject.observe(version: migration_version_2, name: migration_name_2, connection: connection) { raise 'something went wrong' }
+ rescue StandardError
+ nil
+ end
expect { load_observation(result_dir, migration_name) }.not_to raise_error
expect { load_observation(result_dir, migration_name_2) }.not_to raise_error
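
This hunk, like several later ones in the diff, swaps the one-line modifier rescue for an explicit begin/rescue StandardError block. A small standalone before/after sketch; risky_call is a placeholder.

# Modifier rescue: terse, but it silently rescues StandardError and is easy to misread.
value = risky_call rescue nil

# Explicit block: the rescued class is visible and the block composes cleanly
# with expect { ... } style assertions, as in the spec above.
value =
  begin
    risky_call
  rescue StandardError
    nil
  end
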
diff --git a/spec/lib/gitlab/database/migrations/lock_retry_mixin_spec.rb b/spec/lib/gitlab/database/migrations/lock_retry_mixin_spec.rb
index 50ad77caaf1..6092d985ce8 100644
--- a/spec/lib/gitlab/database/migrations/lock_retry_mixin_spec.rb
+++ b/spec/lib/gitlab/database/migrations/lock_retry_mixin_spec.rb
@@ -83,10 +83,10 @@ RSpec.describe Gitlab::Database::Migrations::LockRetryMixin do
context 'with transactions disabled' do
let(:migration) { double('migration', enable_lock_retries?: false) }
- let(:receiver) { double('receiver', use_transaction?: false)}
+ let(:receiver) { double('receiver', use_transaction?: false) }
it 'calls super method' do
- p = proc { }
+ p = proc {}
expect(receiver).to receive(:ddl_transaction).with(migration, &p)
@@ -95,11 +95,11 @@ RSpec.describe Gitlab::Database::Migrations::LockRetryMixin do
end
context 'with transactions enabled, but lock retries disabled' do
- let(:receiver) { double('receiver', use_transaction?: true)}
+ let(:receiver) { double('receiver', use_transaction?: true) }
let(:migration) { double('migration', enable_lock_retries?: false) }
it 'calls super method' do
- p = proc { }
+ p = proc {}
expect(receiver).to receive(:ddl_transaction).with(migration, &p)
@@ -108,12 +108,12 @@ RSpec.describe Gitlab::Database::Migrations::LockRetryMixin do
end
context 'with transactions enabled and lock retries enabled' do
- let(:receiver) { double('receiver', use_transaction?: true)}
+ let(:receiver) { double('receiver', use_transaction?: true) }
let(:migration) { double('migration', migration_connection: connection, enable_lock_retries?: true) }
let(:connection) { ActiveRecord::Base.connection }
it 'calls super method' do
- p = proc { }
+ p = proc {}
expect(receiver).not_to receive(:ddl_transaction)
expect_next_instance_of(Gitlab::Database::WithLockRetries) do |retries|
diff --git a/spec/lib/gitlab/database/migrations/runner_spec.rb b/spec/lib/gitlab/database/migrations/runner_spec.rb
index e7f68e3e4a8..a37247ba0c6 100644
--- a/spec/lib/gitlab/database/migrations/runner_spec.rb
+++ b/spec/lib/gitlab/database/migrations/runner_spec.rb
@@ -41,7 +41,7 @@ RSpec.describe Gitlab::Database::Migrations::Runner do
allow(described_class).to receive(:migration_context).and_return(ctx)
- names_this_branch = (applied_migrations_this_branch + pending_migrations).map { |m| "db/migrate/#{m.version}_#{m.name}.rb"}
+ names_this_branch = (applied_migrations_this_branch + pending_migrations).map { |m| "db/migrate/#{m.version}_#{m.name}.rb" }
allow(described_class).to receive(:migration_file_names_this_branch).and_return(names_this_branch)
end
diff --git a/spec/lib/gitlab/database/migrations/test_batched_background_runner_spec.rb b/spec/lib/gitlab/database/migrations/test_batched_background_runner_spec.rb
index f1f72d71e1a..9451a6bd34a 100644
--- a/spec/lib/gitlab/database/migrations/test_batched_background_runner_spec.rb
+++ b/spec/lib/gitlab/database/migrations/test_batched_background_runner_spec.rb
@@ -18,7 +18,7 @@ RSpec.describe Gitlab::Database::Migrations::TestBatchedBackgroundRunner, :freez
let(:connection) { ApplicationRecord.connection }
- let(:table_name) { "_test_column_copying"}
+ let(:table_name) { "_test_column_copying" }
before do
connection.execute(<<~SQL)
@@ -50,18 +50,16 @@ RSpec.describe Gitlab::Database::Migrations::TestBatchedBackgroundRunner, :freez
context 'with jobs to run' do
let(:migration_name) { 'TestBackgroundMigration' }
- before do
- migration.queue_batched_background_migration(
- migration_name, table_name, :id, job_interval: 5.minutes, batch_size: 100
- )
- end
-
it 'samples jobs' do
calls = []
define_background_migration(migration_name) do |*args|
calls << args
end
+ migration.queue_batched_background_migration(migration_name, table_name, :id,
+ job_interval: 5.minutes,
+ batch_size: 100)
+
described_class.new(result_dir: result_dir, connection: connection).run_jobs(for_duration: 3.minutes)
expect(calls.count).to eq(10) # 1000 rows / batch size 100 = 10
@@ -70,6 +68,9 @@ RSpec.describe Gitlab::Database::Migrations::TestBatchedBackgroundRunner, :freez
context 'with multiple jobs to run' do
it 'runs all jobs created within the last 3 hours' do
old_migration = define_background_migration(migration_name)
+ migration.queue_batched_background_migration(migration_name, table_name, :id,
+ job_interval: 5.minutes,
+ batch_size: 100)
travel 4.hours
diff --git a/spec/lib/gitlab/database/partitioning/sliding_list_strategy_spec.rb b/spec/lib/gitlab/database/partitioning/sliding_list_strategy_spec.rb
index d8b06ee1a5d..04b9fba5b2f 100644
--- a/spec/lib/gitlab/database/partitioning/sliding_list_strategy_spec.rb
+++ b/spec/lib/gitlab/database/partitioning/sliding_list_strategy_spec.rb
@@ -48,61 +48,43 @@ RSpec.describe Gitlab::Database::Partitioning::SlidingListStrategy do
end
describe '#validate_and_fix' do
- context 'feature flag is disabled' do
- before do
- stub_feature_flags(fix_sliding_list_partitioning: false)
- end
+ it 'does not call change_column_default if the partitioning in a valid state' do
+ expect(strategy.model.connection).not_to receive(:change_column_default)
- it 'does not try to fix the default partition value' do
- connection.change_column_default(model.table_name, strategy.partitioning_key, 3)
- expect(strategy.model.connection).not_to receive(:change_column_default)
- strategy.validate_and_fix
- end
+ strategy.validate_and_fix
end
- context 'feature flag is enabled' do
- before do
- stub_feature_flags(fix_sliding_list_partitioning: true)
- end
-
- it 'does not call change_column_default if the partitioning in a valid state' do
- expect(strategy.model.connection).not_to receive(:change_column_default)
-
- strategy.validate_and_fix
- end
-
- it 'calls change_column_default on partition_key with the most default partition number' do
- connection.change_column_default(model.table_name, strategy.partitioning_key, 1)
+ it 'calls change_column_default on partition_key with the most default partition number' do
+ connection.change_column_default(model.table_name, strategy.partitioning_key, 1)
- expect(Gitlab::AppLogger).to receive(:warn).with(
- message: 'Fixed default value of sliding_list_strategy partitioning_key',
- connection_name: 'main',
- old_value: 1,
- new_value: 2,
- table_name: table_name,
- column: strategy.partitioning_key
- )
+ expect(Gitlab::AppLogger).to receive(:warn).with(
+ message: 'Fixed default value of sliding_list_strategy partitioning_key',
+ connection_name: 'main',
+ old_value: 1,
+ new_value: 2,
+ table_name: table_name,
+ column: strategy.partitioning_key
+ )
- expect(strategy.model.connection).to receive(:change_column_default).with(
- model.table_name, strategy.partitioning_key, 2
- ).and_call_original
+ expect(strategy.model.connection).to receive(:change_column_default).with(
+ model.table_name, strategy.partitioning_key, 2
+ ).and_call_original
- strategy.validate_and_fix
- end
+ strategy.validate_and_fix
+ end
- it 'does not change the default column if it has been changed in the meanwhile by another process' do
- expect(strategy).to receive(:current_default_value).and_return(1, 2)
+ it 'does not change the default column if it has been changed in the meanwhile by another process' do
+ expect(strategy).to receive(:current_default_value).and_return(1, 2)
- expect(strategy.model.connection).not_to receive(:change_column_default)
+ expect(strategy.model.connection).not_to receive(:change_column_default)
- expect(Gitlab::AppLogger).to receive(:warn).with(
- message: 'Table partitions or partition key default value have been changed by another process',
- table_name: table_name,
- default_value: 2
- )
+ expect(Gitlab::AppLogger).to receive(:warn).with(
+ message: 'Table partitions or partition key default value have been changed by another process',
+ table_name: table_name,
+ default_value: 2
+ )
- strategy.validate_and_fix
- end
+ strategy.validate_and_fix
end
end
diff --git a/spec/lib/gitlab/database/partitioning_spec.rb b/spec/lib/gitlab/database/partitioning_spec.rb
index 7c69f639aab..36c8b0811fe 100644
--- a/spec/lib/gitlab/database/partitioning_spec.rb
+++ b/spec/lib/gitlab/database/partitioning_spec.rb
@@ -89,7 +89,7 @@ RSpec.describe Gitlab::Database::Partitioning do
end
it 'manages partitions for each given model' do
- expect { described_class.sync_partitions(models)}
+ expect { described_class.sync_partitions(models) }
.to change { find_partitions(table_names.first).size }.from(0)
.and change { find_partitions(table_names.last).size }.from(0)
end
diff --git a/spec/lib/gitlab/database/query_analyzers/gitlab_schemas_validate_connection_spec.rb b/spec/lib/gitlab/database/query_analyzers/gitlab_schemas_validate_connection_spec.rb
index 5e8afc0102e..ddf5793049d 100644
--- a/spec/lib/gitlab/database/query_analyzers/gitlab_schemas_validate_connection_spec.rb
+++ b/spec/lib/gitlab/database/query_analyzers/gitlab_schemas_validate_connection_spec.rb
@@ -5,6 +5,13 @@ require 'spec_helper'
RSpec.describe Gitlab::Database::QueryAnalyzers::GitlabSchemasValidateConnection, query_analyzers: false do
let(:analyzer) { described_class }
+ # We keep only the GitlabSchemasValidateConnection analyzer running
+ around do |example|
+ Gitlab::Database::QueryAnalyzers::GitlabSchemasValidateConnection.with_suppressed(false) do
+ example.run
+ end
+ end
+
context 'properly observes all queries', :request_store do
using RSpec::Parameterized::TableSyntax
@@ -61,6 +68,24 @@ RSpec.describe Gitlab::Database::QueryAnalyzers::GitlabSchemasValidateConnection
end
end
+ context "when analyzer is enabled for tests", :query_analyzers do
+ before do
+ skip_if_multiple_databases_not_setup
+ end
+
+ it "throws an error when trying to access a table that belongs to the gitlab_main schema from the ci database" do
+ expect do
+ Ci::ApplicationRecord.connection.execute("select * from users limit 1")
+ end.to raise_error(Gitlab::Database::QueryAnalyzers::GitlabSchemasValidateConnection::CrossSchemaAccessError)
+ end
+
+ it "throws an error when trying to access a table that belongs to the gitlab_ci schema from the main database" do
+ expect do
+ ApplicationRecord.connection.execute("select * from ci_builds limit 1")
+ end.to raise_error(Gitlab::Database::QueryAnalyzers::GitlabSchemasValidateConnection::CrossSchemaAccessError)
+ end
+ end
+
def process_sql(model, sql)
Gitlab::Database::QueryAnalyzer.instance.within([analyzer]) do
# Skip load balancer and retrieve connection assigned to model
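
Both this file and the RestrictGitlabSchema spec earlier in the diff re-enable the analyzer through an around hook that calls with_suppressed(false). One plausible shape for such a helper, sketched here rather than taken from GitLab's source, is a per-thread flag toggled for the duration of the block and always restored.

# Sketch of a suppression toggle; the thread-local key and module name are assumptions.
module SuppressibleAnalyzer
  def suppressed?
    Thread.current[:analyzer_suppressed]
  end

  def with_suppressed(value = true)
    previous = suppressed?
    Thread.current[:analyzer_suppressed] = value
    yield
  ensure
    Thread.current[:analyzer_suppressed] = previous
  end
end
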
diff --git a/spec/lib/gitlab/database/reindexing/grafana_notifier_spec.rb b/spec/lib/gitlab/database/reindexing/grafana_notifier_spec.rb
index 34670696787..1bccdda3be1 100644
--- a/spec/lib/gitlab/database/reindexing/grafana_notifier_spec.rb
+++ b/spec/lib/gitlab/database/reindexing/grafana_notifier_spec.rb
@@ -6,7 +6,7 @@ RSpec.describe Gitlab::Database::Reindexing::GrafanaNotifier do
include Database::DatabaseHelpers
let(:api_key) { "foo" }
- let(:api_url) { "http://bar"}
+ let(:api_url) { "http://bar" }
let(:additional_tag) { "some-tag" }
let(:action) { create(:reindex_action) }
diff --git a/spec/lib/gitlab/database/reindexing_spec.rb b/spec/lib/gitlab/database/reindexing_spec.rb
index 976b9896dfa..495e953f993 100644
--- a/spec/lib/gitlab/database/reindexing_spec.rb
+++ b/spec/lib/gitlab/database/reindexing_spec.rb
@@ -46,6 +46,27 @@ RSpec.describe Gitlab::Database::Reindexing do
end
end
+ context 'when async index destruction is enabled' do
+ it 'executes async index destruction prior to any reindexing actions' do
+ stub_feature_flags(database_async_index_destruction: true)
+
+ expect(Gitlab::Database::AsyncIndexes).to receive(:drop_pending_indexes!).ordered.exactly(databases_count).times
+ expect(described_class).to receive(:automatic_reindexing).ordered.exactly(databases_count).times
+
+ described_class.invoke
+ end
+ end
+
+ context 'when async index destruction is disabled' do
+ it 'does not execute async index destruction' do
+ stub_feature_flags(database_async_index_destruction: false)
+
+ expect(Gitlab::Database::AsyncIndexes).not_to receive(:drop_pending_indexes!)
+
+ described_class.invoke
+ end
+ end
+
context 'calls automatic reindexing' do
it 'uses all candidate indexes' do
expect(described_class).to receive(:automatic_reindexing).exactly(databases_count).times
diff --git a/spec/lib/gitlab/database/shared_model_spec.rb b/spec/lib/gitlab/database/shared_model_spec.rb
index c88edc17817..7e0ba3397d1 100644
--- a/spec/lib/gitlab/database/shared_model_spec.rb
+++ b/spec/lib/gitlab/database/shared_model_spec.rb
@@ -106,7 +106,7 @@ RSpec.describe Gitlab::Database::SharedModel do
shared_model = shared_model_class.new
- expect(shared_model.connection_db_config). to eq(described_class.connection_db_config)
+ expect(shared_model.connection_db_config).to eq(described_class.connection_db_config)
end
end
end
diff --git a/spec/lib/gitlab/database/with_lock_retries_outside_transaction_spec.rb b/spec/lib/gitlab/database/with_lock_retries_outside_transaction_spec.rb
index 6c32fb3ca17..836332524a9 100644
--- a/spec/lib/gitlab/database/with_lock_retries_outside_transaction_spec.rb
+++ b/spec/lib/gitlab/database/with_lock_retries_outside_transaction_spec.rb
@@ -232,14 +232,14 @@ RSpec.describe Gitlab::Database::WithLockRetriesOutsideTransaction do
expect(connection).to receive(:execute).with('RESET idle_in_transaction_session_timeout; RESET lock_timeout').and_call_original
expect(connection).to receive(:execute).with("SET lock_timeout TO '15ms'").and_call_original
- subject.run { }
+ subject.run {}
end
it 'calls `sleep` after the first iteration fails, using the configured sleep time' do
expect(subject).to receive(:run_block_with_lock_timeout).and_raise(ActiveRecord::LockWaitTimeout).twice
expect(subject).to receive(:sleep).with(0.025)
- subject.run { }
+ subject.run {}
end
end
end
diff --git a/spec/lib/gitlab/database/with_lock_retries_spec.rb b/spec/lib/gitlab/database/with_lock_retries_spec.rb
index 6b35ccafabc..797a01c482d 100644
--- a/spec/lib/gitlab/database/with_lock_retries_spec.rb
+++ b/spec/lib/gitlab/database/with_lock_retries_spec.rb
@@ -248,14 +248,14 @@ RSpec.describe Gitlab::Database::WithLockRetries do
expect(connection).to receive(:execute).with("SET LOCAL lock_timeout TO '15ms'").and_call_original
expect(connection).to receive(:execute).with("RELEASE SAVEPOINT active_record_1", "TRANSACTION").and_call_original
- subject.run { }
+ subject.run {}
end
it 'calls `sleep` after the first iteration fails, using the configured sleep time' do
expect(subject).to receive(:run_block_with_lock_timeout).and_raise(ActiveRecord::LockWaitTimeout).twice
expect(subject).to receive(:sleep).with(0.025)
- subject.run { }
+ subject.run {}
end
end
@@ -265,13 +265,13 @@ RSpec.describe Gitlab::Database::WithLockRetries do
it 'prevents running inside already open transaction' do
allow(connection).to receive(:transaction_open?).and_return(true)
- expect { subject.run { } }.to raise_error(/should not run inside already open transaction/)
+ expect { subject.run {} }.to raise_error(/should not run inside already open transaction/)
end
it 'does not raise the error if not inside open transaction' do
allow(connection).to receive(:transaction_open?).and_return(false)
- expect { subject.run { } }.not_to raise_error
+ expect { subject.run {} }.not_to raise_error
end
end
end
diff --git a/spec/lib/gitlab/database_importers/common_metrics/importer_spec.rb b/spec/lib/gitlab/database_importers/common_metrics/importer_spec.rb
index fdf16069381..1150de880b5 100644
--- a/spec/lib/gitlab/database_importers/common_metrics/importer_spec.rb
+++ b/spec/lib/gitlab/database_importers/common_metrics/importer_spec.rb
@@ -84,7 +84,7 @@ RSpec.describe Gitlab::DatabaseImporters::CommonMetrics::Importer do
end
context 'if ID is missing' do
- let(:query_identifier) { }
+ let(:query_identifier) {}
it 'raises exception' do
expect { subject.execute }.to raise_error(Gitlab::DatabaseImporters::CommonMetrics::Importer::MissingQueryId)
diff --git a/spec/lib/gitlab/diff/highlight_cache_spec.rb b/spec/lib/gitlab/diff/highlight_cache_spec.rb
index 5350dda5fb2..1d1ffc8c275 100644
--- a/spec/lib/gitlab/diff/highlight_cache_spec.rb
+++ b/spec/lib/gitlab/diff/highlight_cache_spec.rb
@@ -115,6 +115,10 @@ RSpec.describe Gitlab::Diff::HighlightCache, :clean_gitlab_redis_cache do
.once
.and_call_original
+ Gitlab::Redis::Cache.with do |redis|
+ expect(redis).to receive(:expire).with(cache.key, described_class::EXPIRATION)
+ end
+
2.times { cache.write_if_empty }
end
@@ -259,8 +263,12 @@ RSpec.describe Gitlab::Diff::HighlightCache, :clean_gitlab_redis_cache do
describe '#key' do
subject { cache.key }
+ def options_hash(options_array)
+ OpenSSL::Digest::SHA256.hexdigest(options_array.join)
+ end
+
it 'returns cache key' do
- is_expected.to eq("highlighted-diff-files:#{cache.diffable.cache_key}:2:#{cache.diff_options}:true:true")
+ is_expected.to eq("highlighted-diff-files:#{cache.diffable.cache_key}:2:#{options_hash([cache.diff_options, true, true])}")
end
context 'when the `use_marker_ranges` feature flag is disabled' do
@@ -269,7 +277,7 @@ RSpec.describe Gitlab::Diff::HighlightCache, :clean_gitlab_redis_cache do
end
it 'returns the original version of the cache' do
- is_expected.to eq("highlighted-diff-files:#{cache.diffable.cache_key}:2:#{cache.diff_options}:false:true")
+ is_expected.to eq("highlighted-diff-files:#{cache.diffable.cache_key}:2:#{options_hash([cache.diff_options, false, true])}")
end
end
@@ -279,7 +287,7 @@ RSpec.describe Gitlab::Diff::HighlightCache, :clean_gitlab_redis_cache do
end
it 'returns the original version of the cache' do
- is_expected.to eq("highlighted-diff-files:#{cache.diffable.cache_key}:2:#{cache.diff_options}:true:false")
+ is_expected.to eq("highlighted-diff-files:#{cache.diffable.cache_key}:2:#{options_hash([cache.diff_options, true, false])}")
end
end
end
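
The updated key expectations above fold the trailing options into a single SHA256 digest instead of interpolating them verbatim. A standalone sketch of that construction; the diffable cache key value is a placeholder.

# Compact the cache key by hashing the joined options, mirroring the spec's
# options_hash helper.
require 'openssl'

def options_hash(options_array)
  OpenSSL::Digest::SHA256.hexdigest(options_array.join)
end

diff_options = { ignore_whitespace_change: false }
diffable_cache_key = 'merge_request_diffs/123-abcdef' # placeholder value
key = "highlighted-diff-files:#{diffable_cache_key}:2:#{options_hash([diff_options, true, true])}"
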
diff --git a/spec/lib/gitlab/diff/highlight_spec.rb b/spec/lib/gitlab/diff/highlight_spec.rb
index 624160d2f48..c378ecb8134 100644
--- a/spec/lib/gitlab/diff/highlight_spec.rb
+++ b/spec/lib/gitlab/diff/highlight_spec.rb
@@ -117,7 +117,7 @@ RSpec.describe Gitlab::Diff::Highlight do
it 'reports to Sentry if configured' do
expect(Gitlab::ErrorTracking).to receive(:track_and_raise_for_dev_exception).and_call_original
- expect { subject }. to raise_exception(RangeError)
+ expect { subject }.to raise_exception(RangeError)
end
end
diff --git a/spec/lib/gitlab/diff/rendered/notebook/diff_file_helper_spec.rb b/spec/lib/gitlab/diff/rendered/notebook/diff_file_helper_spec.rb
index 42ab2d1d063..ad92d90e253 100644
--- a/spec/lib/gitlab/diff/rendered/notebook/diff_file_helper_spec.rb
+++ b/spec/lib/gitlab/diff/rendered/notebook/diff_file_helper_spec.rb
@@ -49,7 +49,7 @@ RSpec.describe Gitlab::Diff::Rendered::Notebook::DiffFileHelper do
describe '#image_as_rich_text' do
let(:img) { 'data:image/png;base64,some_image_here' }
- let(:line_text) { " ![](#{img})"}
+ let(:line_text) { " ![](#{img})" }
subject { dummy.image_as_rich_text(line_text) }
diff --git a/spec/lib/gitlab/doorkeeper_secret_storing/pbkdf2_sha512_spec.rb b/spec/lib/gitlab/doorkeeper_secret_storing/pbkdf2_sha512_spec.rb
new file mode 100644
index 00000000000..e953733c997
--- /dev/null
+++ b/spec/lib/gitlab/doorkeeper_secret_storing/pbkdf2_sha512_spec.rb
@@ -0,0 +1,36 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::DoorkeeperSecretStoring::Pbkdf2Sha512 do
+ describe '.transform_secret' do
+ let(:plaintext_token) { 'CzOBzBfU9F-HvsqfTaTXF4ivuuxYZuv3BoAK4pnvmyw' }
+
+ it 'generates a PBKDF2+SHA512 hashed value in the correct format' do
+ expect(described_class.transform_secret(plaintext_token))
+ .to eq("$pbkdf2-sha512$20000$$.c0G5XJVEew1TyeJk5TrkvB0VyOaTmDzPrsdNRED9vVeZlSyuG3G90F0ow23zUCiWKAVwmNnR/ceh.nJG3MdpQ") # rubocop:disable Layout/LineLength
+ end
+
+ context 'when hash_oauth_tokens is disabled' do
+ before do
+ stub_feature_flags(hash_oauth_tokens: false)
+ end
+
+ it 'returns a plaintext token' do
+ expect(described_class.transform_secret(plaintext_token)).to eq(plaintext_token)
+ end
+ end
+ end
+
+ describe 'STRETCHES' do
+ it 'is 20_000' do
+ expect(described_class::STRETCHES).to eq(20_000)
+ end
+ end
+
+ describe 'SALT' do
+ it 'is empty' do
+ expect(described_class::SALT).to be_empty
+ end
+ end
+end
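
The new spec above pins the PBKDF2+SHA512 output format, the 20_000 stretches and the empty salt. Below is a self-contained sketch of producing a token hash in that $pbkdf2-sha512$... layout; the adapted Base64 encoding (replace + with ., strip padding) is an assumption inferred from the expected string, not taken from the implementation.

# Illustrative sketch of PBKDF2+SHA512 secret hashing in a modular-crypt style format.
require 'openssl'
require 'base64'

STRETCHES = 20_000
SALT = ''

def transform_secret(plain_secret)
  digest = OpenSSL::Digest::SHA512.new
  hashed = OpenSSL::KDF.pbkdf2_hmac(
    plain_secret,
    salt: SALT,
    iterations: STRETCHES,
    hash: digest,
    length: digest.digest_length
  )
  # Assumed "adapted" Base64: '+' becomes '.', padding is dropped.
  encoded = Base64.strict_encode64(hashed).tr('+', '.').delete('=')
  "$pbkdf2-sha512$#{STRETCHES}$#{SALT}$#{encoded}"
end
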
diff --git a/spec/lib/gitlab/email/handler/create_note_handler_spec.rb b/spec/lib/gitlab/email/handler/create_note_handler_spec.rb
index 9ff395070ea..585dce331ed 100644
--- a/spec/lib/gitlab/email/handler/create_note_handler_spec.rb
+++ b/spec/lib/gitlab/email/handler/create_note_handler_spec.rb
@@ -20,7 +20,7 @@ RSpec.describe Gitlab::Email::Handler::CreateNoteHandler do
it_behaves_like :note_handler_shared_examples do
let(:recipient) { sent_notification.recipient }
- let(:update_commands_only) { fixture_file('emails/update_commands_only_reply.eml')}
+ let(:update_commands_only) { fixture_file('emails/update_commands_only_reply.eml') }
let(:no_content) { fixture_file('emails/no_content_reply.eml') }
let(:commands_in_reply) { fixture_file('emails/commands_in_reply.eml') }
let(:with_quick_actions) { fixture_file('emails/valid_reply_with_quick_actions.eml') }
@@ -54,7 +54,7 @@ RSpec.describe Gitlab::Email::Handler::CreateNoteHandler do
end
context 'with a secondary verified email address' do
- let(:verified_email) { 'alan@adventuretime.ooo'}
+ let(:verified_email) { 'alan@adventuretime.ooo' }
let(:email_raw) { fixture_file('emails/valid_reply.eml').gsub('jake@adventuretime.ooo', verified_email) }
before do
diff --git a/spec/lib/gitlab/email/handler/service_desk_handler_spec.rb b/spec/lib/gitlab/email/handler/service_desk_handler_spec.rb
index d0aba70081b..08a7383700b 100644
--- a/spec/lib/gitlab/email/handler/service_desk_handler_spec.rb
+++ b/spec/lib/gitlab/email/handler/service_desk_handler_spec.rb
@@ -493,11 +493,19 @@ RSpec.describe Gitlab::Email::Handler::ServiceDeskHandler do
end
it 'does not create an issue' do
- expect { receiver.execute rescue nil }.not_to change { Issue.count }
+ expect do
+ receiver.execute
+ rescue StandardError
+ nil
+ end.not_to change { Issue.count }
end
it 'does not send thank you email' do
- expect { receiver.execute rescue nil }.not_to have_enqueued_job.on_queue('mailers')
+ expect do
+ receiver.execute
+ rescue StandardError
+ nil
+ end.not_to have_enqueued_job.on_queue('mailers')
end
end
@@ -532,7 +540,7 @@ RSpec.describe Gitlab::Email::Handler::ServiceDeskHandler do
end
context 'service desk is disabled for the project' do
- let(:group) { create(:group)}
+ let(:group) { create(:group) }
let(:project) { create(:project, :public, group: group, path: 'test', service_desk_enabled: false) }
it 'bounces the email' do
@@ -540,7 +548,11 @@ RSpec.describe Gitlab::Email::Handler::ServiceDeskHandler do
end
it "doesn't create an issue" do
- expect { receiver.execute rescue nil }.not_to change { Issue.count }
+ expect do
+ receiver.execute
+ rescue StandardError
+ nil
+ end.not_to change { Issue.count }
end
end
end
diff --git a/spec/lib/gitlab/email/message/in_product_marketing/admin_verify_spec.rb b/spec/lib/gitlab/email/message/in_product_marketing/admin_verify_spec.rb
index b5c3415fe12..7a09feb5b64 100644
--- a/spec/lib/gitlab/email/message/in_product_marketing/admin_verify_spec.rb
+++ b/spec/lib/gitlab/email/message/in_product_marketing/admin_verify_spec.rb
@@ -8,7 +8,7 @@ RSpec.describe Gitlab::Email::Message::InProductMarketing::AdminVerify do
let(:series) { 0 }
- subject(:message) { described_class.new(group: group, user: user, series: series)}
+ subject(:message) { described_class.new(group: group, user: user, series: series) }
describe 'public methods' do
it 'returns value for series', :aggregate_failures do
diff --git a/spec/lib/gitlab/email/message/in_product_marketing/create_spec.rb b/spec/lib/gitlab/email/message/in_product_marketing/create_spec.rb
index 35470ef3555..d5aec280ea6 100644
--- a/spec/lib/gitlab/email/message/in_product_marketing/create_spec.rb
+++ b/spec/lib/gitlab/email/message/in_product_marketing/create_spec.rb
@@ -8,7 +8,7 @@ RSpec.describe Gitlab::Email::Message::InProductMarketing::Create do
let_it_be(:group) { build(:group) }
let_it_be(:user) { build(:user) }
- subject(:message) { described_class.new(group: group, user: user, series: series)}
+ subject(:message) { described_class.new(group: group, user: user, series: series) }
describe "public methods" do
where(series: [0, 1, 2])
diff --git a/spec/lib/gitlab/email/message/in_product_marketing/team_short_spec.rb b/spec/lib/gitlab/email/message/in_product_marketing/team_short_spec.rb
index daeacef53f6..3ac2076bf35 100644
--- a/spec/lib/gitlab/email/message/in_product_marketing/team_short_spec.rb
+++ b/spec/lib/gitlab/email/message/in_product_marketing/team_short_spec.rb
@@ -10,7 +10,7 @@ RSpec.describe Gitlab::Email::Message::InProductMarketing::TeamShort do
let(:series) { 0 }
- subject(:message) { described_class.new(group: group, user: user, series: series)}
+ subject(:message) { described_class.new(group: group, user: user, series: series) }
describe 'public methods' do
it 'returns value for series', :aggregate_failures do
diff --git a/spec/lib/gitlab/email/message/in_product_marketing/team_spec.rb b/spec/lib/gitlab/email/message/in_product_marketing/team_spec.rb
index eca8ba1df00..3354b2ed5cf 100644
--- a/spec/lib/gitlab/email/message/in_product_marketing/team_spec.rb
+++ b/spec/lib/gitlab/email/message/in_product_marketing/team_spec.rb
@@ -8,7 +8,7 @@ RSpec.describe Gitlab::Email::Message::InProductMarketing::Team do
let_it_be(:group) { build(:group) }
let_it_be(:user) { build(:user) }
- subject(:message) { described_class.new(group: group, user: user, series: series)}
+ subject(:message) { described_class.new(group: group, user: user, series: series) }
describe "public methods" do
where(series: [0, 1])
diff --git a/spec/lib/gitlab/email/message/in_product_marketing/trial_short_spec.rb b/spec/lib/gitlab/email/message/in_product_marketing/trial_short_spec.rb
index ebad4672eb3..cf0a119ea80 100644
--- a/spec/lib/gitlab/email/message/in_product_marketing/trial_short_spec.rb
+++ b/spec/lib/gitlab/email/message/in_product_marketing/trial_short_spec.rb
@@ -8,7 +8,7 @@ RSpec.describe Gitlab::Email::Message::InProductMarketing::TrialShort do
let(:series) { 0 }
- subject(:message) { described_class.new(group: group, user: user, series: series)}
+ subject(:message) { described_class.new(group: group, user: user, series: series) }
describe 'public methods' do
it 'returns value for series', :aggregate_failures do
diff --git a/spec/lib/gitlab/email/message/in_product_marketing/trial_spec.rb b/spec/lib/gitlab/email/message/in_product_marketing/trial_spec.rb
index 3e18b8e35b6..7f86c9a6c6f 100644
--- a/spec/lib/gitlab/email/message/in_product_marketing/trial_spec.rb
+++ b/spec/lib/gitlab/email/message/in_product_marketing/trial_spec.rb
@@ -8,7 +8,7 @@ RSpec.describe Gitlab::Email::Message::InProductMarketing::Trial do
let_it_be(:group) { build(:group) }
let_it_be(:user) { build(:user) }
- subject(:message) { described_class.new(group: group, user: user, series: series)}
+ subject(:message) { described_class.new(group: group, user: user, series: series) }
describe "public methods" do
where(series: [0, 1, 2])
diff --git a/spec/lib/gitlab/email/message/in_product_marketing/verify_spec.rb b/spec/lib/gitlab/email/message/in_product_marketing/verify_spec.rb
index a7da2e9553d..7e6f62289d2 100644
--- a/spec/lib/gitlab/email/message/in_product_marketing/verify_spec.rb
+++ b/spec/lib/gitlab/email/message/in_product_marketing/verify_spec.rb
@@ -6,7 +6,7 @@ RSpec.describe Gitlab::Email::Message::InProductMarketing::Verify do
let_it_be(:group) { build(:group) }
let_it_be(:user) { build(:user) }
- subject(:message) { described_class.new(group: group, user: user, series: series)}
+ subject(:message) { described_class.new(group: group, user: user, series: series) }
describe "public methods" do
context 'with series 0' do
diff --git a/spec/lib/gitlab/error_tracking/error_repository/open_api_strategy_spec.rb b/spec/lib/gitlab/error_tracking/error_repository/open_api_strategy_spec.rb
index 81e2a410962..bcd59c34ea2 100644
--- a/spec/lib/gitlab/error_tracking/error_repository/open_api_strategy_spec.rb
+++ b/spec/lib/gitlab/error_tracking/error_repository/open_api_strategy_spec.rb
@@ -430,7 +430,7 @@ RSpec.describe Gitlab::ErrorTracking::ErrorRepository::OpenApiStrategy do
it do
is_expected
- .to eq("#{config.scheme}://#{public_key}@#{config.host}/errortracking/api/v1/projects/api/#{project.id}")
+ .to eq("#{config.scheme}://#{public_key}@#{config.host}/errortracking/api/v1/projects/#{project.id}")
end
end
end
diff --git a/spec/lib/gitlab/error_tracking/logger_spec.rb b/spec/lib/gitlab/error_tracking/logger_spec.rb
index 751ec10a1f0..1b722fc7896 100644
--- a/spec/lib/gitlab/error_tracking/logger_spec.rb
+++ b/spec/lib/gitlab/error_tracking/logger_spec.rb
@@ -6,7 +6,7 @@ RSpec.describe Gitlab::ErrorTracking::Logger do
describe '.capture_exception' do
let(:exception) { RuntimeError.new('boom') }
let(:payload) { { foo: '123' } }
- let(:log_entry) { { message: 'boom', context: payload }}
+ let(:log_entry) { { message: 'boom', context: payload } }
it 'calls Gitlab::ErrorTracking::Logger.error with formatted log entry' do
expect_next_instance_of(Gitlab::ErrorTracking::LogFormatter) do |log_formatter|
diff --git a/spec/lib/gitlab/error_tracking/processor/sidekiq_processor_spec.rb b/spec/lib/gitlab/error_tracking/processor/sidekiq_processor_spec.rb
index d33f8393904..bc4526758c0 100644
--- a/spec/lib/gitlab/error_tracking/processor/sidekiq_processor_spec.rb
+++ b/spec/lib/gitlab/error_tracking/processor/sidekiq_processor_spec.rb
@@ -159,13 +159,13 @@ RSpec.describe Gitlab::ErrorTracking::Processor::SidekiqProcessor, :sentry do
context 'when processing via the default error handler' do
context 'with Raven events' do
- let(:event) { raven_event}
+ let(:event) { raven_event }
include_examples 'Sidekiq arguments', args_in_job_hash: true
end
context 'with Sentry events' do
- let(:event) { sentry_event}
+ let(:event) { sentry_event }
include_examples 'Sidekiq arguments', args_in_job_hash: true
end
@@ -173,13 +173,13 @@ RSpec.describe Gitlab::ErrorTracking::Processor::SidekiqProcessor, :sentry do
context 'when processing via Gitlab::ErrorTracking' do
context 'with Raven events' do
- let(:event) { raven_event}
+ let(:event) { raven_event }
include_examples 'Sidekiq arguments', args_in_job_hash: false
end
context 'with Sentry events' do
- let(:event) { sentry_event}
+ let(:event) { sentry_event }
include_examples 'Sidekiq arguments', args_in_job_hash: false
end
@@ -209,13 +209,13 @@ RSpec.describe Gitlab::ErrorTracking::Processor::SidekiqProcessor, :sentry do
end
context 'with Raven events' do
- let(:event) { raven_event}
+ let(:event) { raven_event }
it_behaves_like 'handles jobstr fields'
end
context 'with Sentry events' do
- let(:event) { sentry_event}
+ let(:event) { sentry_event }
it_behaves_like 'handles jobstr fields'
end
@@ -233,13 +233,13 @@ RSpec.describe Gitlab::ErrorTracking::Processor::SidekiqProcessor, :sentry do
end
context 'with Raven events' do
- let(:event) { raven_event}
+ let(:event) { raven_event }
it_behaves_like 'does nothing'
end
context 'with Sentry events' do
- let(:event) { sentry_event}
+ let(:event) { sentry_event }
it_behaves_like 'does nothing'
end
@@ -256,13 +256,13 @@ RSpec.describe Gitlab::ErrorTracking::Processor::SidekiqProcessor, :sentry do
end
context 'with Raven events' do
- let(:event) { raven_event}
+ let(:event) { raven_event }
it_behaves_like 'does nothing'
end
context 'with Sentry events' do
- let(:event) { sentry_event}
+ let(:event) { sentry_event }
it_behaves_like 'does nothing'
end
diff --git a/spec/lib/gitlab/exclusive_lease_helpers/sleeping_lock_spec.rb b/spec/lib/gitlab/exclusive_lease_helpers/sleeping_lock_spec.rb
index f74fbf1206f..1f30ac79488 100644
--- a/spec/lib/gitlab/exclusive_lease_helpers/sleeping_lock_spec.rb
+++ b/spec/lib/gitlab/exclusive_lease_helpers/sleeping_lock_spec.rb
@@ -52,6 +52,28 @@ RSpec.describe Gitlab::ExclusiveLeaseHelpers::SleepingLock, :clean_gitlab_redis_
end
end
+ context 'when the lease is obtained already' do
+ let!(:lease) { stub_exclusive_lease_taken(key) }
+
+ context 'when retries are not specified' do
+ it 'retries to obtain a lease and raises an error' do
+ expect(lease).to receive(:try_obtain).exactly(10).times
+
+ expect { subject.obtain }.to raise_error('Failed to obtain a lock')
+ end
+ end
+
+ context 'when specified retries are above the maximum attempts' do
+ let(:max_attempts) { 100 }
+
+ it 'retries to obtain a lease and raises an error' do
+ expect(lease).to receive(:try_obtain).exactly(65).times
+
+ expect { subject.obtain(max_attempts) }.to raise_error('Failed to obtain a lock')
+ end
+ end
+ end
+
context 'when the lease is held elsewhere' do
let!(:lease) { stub_exclusive_lease_taken(key) }
let(:max_attempts) { 7 }
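
The added contexts above pin the retry behaviour: ten attempts by default and a hard cap of 65 even when more are requested. A simplified sketch of an obtain method with that shape; the backoff between attempts is an assumption.

# Sketch of lease acquisition with capped retries, matching the attempt counts
# asserted in the spec above.
MAX_ATTEMPTS = 65
DEFAULT_ATTEMPTS = 10

def obtain(lease, max_attempts = DEFAULT_ATTEMPTS)
  attempts = [max_attempts, MAX_ATTEMPTS].min

  attempts.times do |i|
    return if lease.try_obtain

    sleep(0.1 * (i + 1)) # assumed backoff between attempts
  end

  raise 'Failed to obtain a lock'
end
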
diff --git a/spec/lib/gitlab/exclusive_lease_helpers_spec.rb b/spec/lib/gitlab/exclusive_lease_helpers_spec.rb
index 8bf06bcebe2..f9db93a6167 100644
--- a/spec/lib/gitlab/exclusive_lease_helpers_spec.rb
+++ b/spec/lib/gitlab/exclusive_lease_helpers_spec.rb
@@ -9,12 +9,12 @@ RSpec.describe Gitlab::ExclusiveLeaseHelpers, :clean_gitlab_redis_shared_state d
let(:unique_key) { SecureRandom.hex(10) }
describe '#in_lock' do
- subject { class_instance.in_lock(unique_key, **options) { } }
+ subject { class_instance.in_lock(unique_key, **options) {} }
let(:options) { {} }
context 'when unique key is not set' do
- let(:unique_key) { }
+ let(:unique_key) {}
it 'raises an error' do
expect { subject }.to raise_error ArgumentError
diff --git a/spec/lib/gitlab/file_markdown_link_builder_spec.rb b/spec/lib/gitlab/file_markdown_link_builder_spec.rb
index ea21bda12d3..d684beaaaca 100644
--- a/spec/lib/gitlab/file_markdown_link_builder_spec.rb
+++ b/spec/lib/gitlab/file_markdown_link_builder_spec.rb
@@ -13,7 +13,7 @@ RSpec.describe Gitlab::FileMarkdownLinkBuilder do
end
describe 'markdown_link' do
- let(:url) { "/uploads/#{filename}"}
+ let(:url) { "/uploads/#{filename}" }
before do
allow(custom_class).to receive(:secure_url).and_return(url)
diff --git a/spec/lib/gitlab/form_builders/gitlab_ui_form_builder_spec.rb b/spec/lib/gitlab/form_builders/gitlab_ui_form_builder_spec.rb
index 2b1fcac9257..98fb154fb05 100644
--- a/spec/lib/gitlab/form_builders/gitlab_ui_form_builder_spec.rb
+++ b/spec/lib/gitlab/form_builders/gitlab_ui_form_builder_spec.rb
@@ -9,6 +9,40 @@ RSpec.describe Gitlab::FormBuilders::GitlabUiFormBuilder do
let_it_be(:form_builder) { described_class.new(:user, user, fake_action_view_base, {}) }
+ describe '#submit' do
+ context 'without pajamas_button enabled' do
+ subject(:submit_html) do
+ form_builder.submit('Save', class: 'gl-button btn-confirm custom-class', data: { test: true })
+ end
+
+ it 'renders a submit input' do
+ expected_html = <<~EOS
+ <input type="submit" name="commit" value="Save" class="gl-button btn-confirm custom-class" data-test="true" data-disable-with="Save" />
+ EOS
+
+ expect(html_strip_whitespace(submit_html)).to eq(html_strip_whitespace(expected_html))
+ end
+ end
+
+ context 'with pajamas_button enabled' do
+ subject(:submit_html) do
+ form_builder.submit('Save', pajamas_button: true, class: 'custom-class', data: { test: true })
+ end
+
+ it 'renders a submit button' do
+ expected_html = <<~EOS
+ <button class="gl-button btn btn-md btn-confirm custom-class" data-test="true" type="submit">
+ <span class="gl-button-text">
+ Save
+ </span>
+ </button>
+ EOS
+
+ expect(html_strip_whitespace(submit_html)).to eq(html_strip_whitespace(expected_html))
+ end
+ end
+ end
+
describe '#gitlab_ui_checkbox_component' do
context 'when not using slots' do
let(:optional_args) { {} }
@@ -25,7 +59,7 @@ RSpec.describe Gitlab::FormBuilders::GitlabUiFormBuilder do
it 'renders correct html' do
expected_html = <<~EOS
<div class="gl-form-checkbox custom-control custom-checkbox">
- <input name="user[view_diffs_file_by_file]" type="hidden" value="0" />
+ <input name="user[view_diffs_file_by_file]" type="hidden" value="0" autocomplete="off" />
<input class="custom-control-input" type="checkbox" value="1" name="user[view_diffs_file_by_file]" id="user_view_diffs_file_by_file" />
<label class="custom-control-label" for="user_view_diffs_file_by_file">
<span>Show one file at a time on merge request&#39;s Changes tab</span>
@@ -51,7 +85,7 @@ RSpec.describe Gitlab::FormBuilders::GitlabUiFormBuilder do
it 'renders help text' do
expected_html = <<~EOS
<div class="gl-form-checkbox custom-control custom-checkbox">
- <input name="user[view_diffs_file_by_file]" type="hidden" value="1" />
+ <input name="user[view_diffs_file_by_file]" type="hidden" value="1" autocomplete="off" />
<input class="custom-control-input checkbox-foo-bar" type="checkbox" value="3" name="user[view_diffs_file_by_file]" id="user_view_diffs_file_by_file" />
<label class="custom-control-label label-foo-bar" for="user_view_diffs_file_by_file">
<span>Show one file at a time on merge request&#39;s Changes tab</span>
@@ -101,7 +135,7 @@ RSpec.describe Gitlab::FormBuilders::GitlabUiFormBuilder do
it 'renders correct html' do
expected_html = <<~EOS
<div class="gl-form-checkbox custom-control custom-checkbox">
- <input name="user[view_diffs_file_by_file]" type="hidden" value="0" />
+ <input name="user[view_diffs_file_by_file]" type="hidden" value="0" autocomplete="off" />
<input class="custom-control-input" type="checkbox" value="1" name="user[view_diffs_file_by_file]" id="user_view_diffs_file_by_file" />
<label class="custom-control-label" for="user_view_diffs_file_by_file">
<span>Show one file at a time on merge request&#39;s Changes tab</span>
@@ -195,6 +229,45 @@ RSpec.describe Gitlab::FormBuilders::GitlabUiFormBuilder do
end
end
+ describe '#gitlab_ui_datepicker' do
+ subject(:datepicker_html) do
+ form_builder.gitlab_ui_datepicker(
+ :expires_at,
+ **optional_args
+ )
+ end
+
+ let(:optional_args) { {} }
+
+ context 'without optional arguments' do
+ it 'renders correct html' do
+ expected_html = <<~EOS
+ <input class="datepicker form-control gl-form-input" type="text" name="user[expires_at]" id="user_expires_at" />
+ EOS
+
+ expect(html_strip_whitespace(datepicker_html)).to eq(html_strip_whitespace(expected_html))
+ end
+ end
+
+ context 'with optional arguments' do
+ let(:optional_args) do
+ {
+ id: 'milk_gone_bad',
+ data: { action: 'throw' },
+ value: '2022-08-01'
+ }
+ end
+
+ it 'renders correct html' do
+ expected_html = <<~EOS
+ <input id="milk_gone_bad" data-action="throw" value="2022-08-01" class="datepicker form-control gl-form-input" type="text" name="user[expires_at]" />
+ EOS
+
+ expect(html_strip_whitespace(datepicker_html)).to eq(html_strip_whitespace(expected_html))
+ end
+ end
+ end
+
private
def html_strip_whitespace(html)
diff --git a/spec/lib/gitlab/git/blame_spec.rb b/spec/lib/gitlab/git/blame_spec.rb
index e514e128785..45d88f57c09 100644
--- a/spec/lib/gitlab/git/blame_spec.rb
+++ b/spec/lib/gitlab/git/blame_spec.rb
@@ -32,7 +32,7 @@ RSpec.describe Gitlab::Git::Blame do
it 'only returns the range' do
expect(result.size).to eq(range.size)
- expect(result.map {|r| r[:line] }).to eq(['', 'This guide details how contribute to GitLab.', ''])
+ expect(result.map { |r| r[:line] }).to eq(['', 'This guide details how contribute to GitLab.', ''])
end
end
diff --git a/spec/lib/gitlab/git/blob_spec.rb b/spec/lib/gitlab/git/blob_spec.rb
index fb4510a78de..0da7aa7dad0 100644
--- a/spec/lib/gitlab/git/blob_spec.rb
+++ b/spec/lib/gitlab/git/blob_spec.rb
@@ -50,7 +50,7 @@ RSpec.describe Gitlab::Git::Blob, :seed_helper do
end
context 'utf-8 branch' do
- let(:blob) { Gitlab::Git::Blob.find(repository, 'Ääh-test-utf-8', "files/ruby/popen.rb")}
+ let(:blob) { Gitlab::Git::Blob.find(repository, 'Ääh-test-utf-8', "files/ruby/popen.rb") }
it { expect(blob.id).to eq(SeedRepo::RubyBlob::ID) }
end
@@ -235,6 +235,7 @@ RSpec.describe Gitlab::Git::Blob, :seed_helper do
it { expect(blob.id).to eq('409f37c4f05865e4fb208c771485f211a22c4c2d') }
it { expect(blob.data).to eq('') }
+
it 'does not mark the blob as binary' do
expect(blob).not_to be_binary_in_repo
end
diff --git a/spec/lib/gitlab/git/branch_spec.rb b/spec/lib/gitlab/git/branch_spec.rb
index 97cd4777b4d..feaa1f6595c 100644
--- a/spec/lib/gitlab/git/branch_spec.rb
+++ b/spec/lib/gitlab/git/branch_spec.rb
@@ -2,8 +2,9 @@
require "spec_helper"
-RSpec.describe Gitlab::Git::Branch, :seed_helper do
- let(:repository) { Gitlab::Git::Repository.new('default', TEST_REPO_PATH, '', 'group/project') }
+RSpec.describe Gitlab::Git::Branch do
+ let(:project) { create(:project, :repository) }
+ let(:repository) { project.repository.raw }
subject { repository.branches }
@@ -54,14 +55,14 @@ RSpec.describe Gitlab::Git::Branch, :seed_helper do
describe '#size' do
subject { super().size }
- it { is_expected.to eq(SeedRepo::Repo::BRANCHES.size) }
+ it { is_expected.to eq(TestEnv::BRANCH_SHA.size) }
end
describe 'first branch' do
let(:branch) { repository.branches.first }
- it { expect(branch.name).to eq(SeedRepo::Repo::BRANCHES.first) }
- it { expect(branch.dereferenced_target.sha).to eq("0b4bc9a49b562e85de7cc9e834518ea6828729b9") }
+ it { expect(branch.name).to eq(TestEnv::BRANCH_SHA.keys.min) }
+ it { expect(branch.dereferenced_target.sha).to start_with(TestEnv::BRANCH_SHA[TestEnv::BRANCH_SHA.keys.min]) }
end
describe 'master branch' do
@@ -69,14 +70,10 @@ RSpec.describe Gitlab::Git::Branch, :seed_helper do
repository.branches.find { |branch| branch.name == 'master' }
end
- it { expect(branch.dereferenced_target.sha).to eq(SeedRepo::LastCommit::ID) }
+ it { expect(branch.dereferenced_target.sha).to start_with(TestEnv::BRANCH_SHA['master']) }
end
context 'with active, stale and future branches' do
- let(:repository) do
- Gitlab::Git::Repository.new('default', TEST_MUTABLE_REPO_PATH, '', 'group/project')
- end
-
let(:user) { create(:user) }
let(:stale_sha) { travel_to(Gitlab::Git::Branch::STALE_BRANCH_THRESHOLD.ago - 5.days) { create_commit } }
let(:active_sha) { travel_to(Gitlab::Git::Branch::STALE_BRANCH_THRESHOLD.ago + 5.days) { create_commit } }
@@ -88,10 +85,6 @@ RSpec.describe Gitlab::Git::Branch, :seed_helper do
repository.create_branch('future-1', future_sha)
end
- after do
- ensure_seeds
- end
-
describe 'examine if the branch is active or stale' do
let(:stale_branch) { repository.find_branch('stale-1') }
let(:active_branch) { repository.find_branch('active-1') }
@@ -117,8 +110,6 @@ RSpec.describe Gitlab::Git::Branch, :seed_helper do
end
end
- it { expect(repository.branches.size).to eq(SeedRepo::Repo::BRANCHES.size) }
-
def create_commit
repository.multi_action(
user,
diff --git a/spec/lib/gitlab/git/commit_spec.rb b/spec/lib/gitlab/git/commit_spec.rb
index da77d8ee5d6..95b49186d0f 100644
--- a/spec/lib/gitlab/git/commit_spec.rb
+++ b/spec/lib/gitlab/git/commit_spec.rb
@@ -222,6 +222,7 @@ RSpec.describe Gitlab::Git::Commit, :seed_helper do
it 'has 10 elements' do
expect(subject.size).to eq(10)
end
+
it { is_expected.to include(SeedRepo::EmptyCommit::ID) }
end
@@ -240,6 +241,7 @@ RSpec.describe Gitlab::Git::Commit, :seed_helper do
it 'has 10 elements' do
expect(subject.size).to eq(10)
end
+
it { is_expected.to include(SeedRepo::EmptyCommit::ID) }
end
@@ -259,6 +261,7 @@ RSpec.describe Gitlab::Git::Commit, :seed_helper do
it 'has 3 elements' do
expect(subject.size).to eq(3)
end
+
it { is_expected.to include("d14d6c0abdd253381df51a723d58691b2ee1ab08") }
it { is_expected.not_to include("eb49186cfa5c4338011f5f590fac11bd66c5c631") }
end
@@ -279,6 +282,7 @@ RSpec.describe Gitlab::Git::Commit, :seed_helper do
it 'has 3 elements' do
expect(subject.size).to eq(3)
end
+
it { is_expected.to include("2f63565e7aac07bcdadb654e253078b727143ec4") }
it { is_expected.not_to include(SeedRepo::Commit::ID) }
end
@@ -299,6 +303,7 @@ RSpec.describe Gitlab::Git::Commit, :seed_helper do
it 'has 3 elements' do
expect(subject.size).to eq(3)
end
+
it { is_expected.to include("874797c3a73b60d2187ed6e2fcabd289ff75171e") }
it { is_expected.not_to include(SeedRepo::Commit::ID) }
end
@@ -570,13 +575,13 @@ RSpec.describe Gitlab::Git::Commit, :seed_helper do
describe '#id' do
subject { super().id }
- it { is_expected.to eq(sample_commit_hash[:id])}
+ it { is_expected.to eq(sample_commit_hash[:id]) }
end
describe '#message' do
subject { super().message }
- it { is_expected.to eq(sample_commit_hash[:message])}
+ it { is_expected.to eq(sample_commit_hash[:message]) }
end
end
@@ -648,6 +653,7 @@ RSpec.describe Gitlab::Git::Commit, :seed_helper do
it 'has 2 elements' do
expect(subject.size).to eq(2)
end
+
it { is_expected.to include("master") }
it { is_expected.not_to include("feature") }
end
diff --git a/spec/lib/gitlab/git/diff_collection_spec.rb b/spec/lib/gitlab/git/diff_collection_spec.rb
index 114b3d01952..0e3e92e03cf 100644
--- a/spec/lib/gitlab/git/diff_collection_spec.rb
+++ b/spec/lib/gitlab/git/diff_collection_spec.rb
@@ -520,7 +520,7 @@ RSpec.describe Gitlab::Git::DiffCollection, :seed_helper do
describe '#real_size' do
subject { super().real_size }
- it { is_expected.to eq('0')}
+ it { is_expected.to eq('0') }
end
describe '#line_count' do
@@ -595,7 +595,7 @@ RSpec.describe Gitlab::Git::DiffCollection, :seed_helper do
end
context 'multi-file collections' do
- let(:iterator) { [{ diff: 'b' }, { diff: 'a' * 20480 }]}
+ let(:iterator) { [{ diff: 'b' }, { diff: 'a' * 20480 }] }
it 'prunes diffs that are quite big' do
diff = nil
diff --git a/spec/lib/gitlab/git/raw_diff_change_spec.rb b/spec/lib/gitlab/git/raw_diff_change_spec.rb
index f894ae1d98b..c55fcc729b6 100644
--- a/spec/lib/gitlab/git/raw_diff_change_spec.rb
+++ b/spec/lib/gitlab/git/raw_diff_change_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe Gitlab::Git::RawDiffChange do
- let(:raw_change) { }
+ let(:raw_change) {}
let(:change) { described_class.new(raw_change) }
context 'bad input' do
diff --git a/spec/lib/gitlab/git/remote_repository_spec.rb b/spec/lib/gitlab/git/remote_repository_spec.rb
deleted file mode 100644
index c7bc81573a6..00000000000
--- a/spec/lib/gitlab/git/remote_repository_spec.rb
+++ /dev/null
@@ -1,61 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::Git::RemoteRepository, :seed_helper do
- let(:repository) { Gitlab::Git::Repository.new('default', TEST_REPO_PATH, '', 'group/project') }
-
- subject { described_class.new(repository) }
-
- describe '#empty?' do
- using RSpec::Parameterized::TableSyntax
-
- where(:repository, :result) do
- Gitlab::Git::Repository.new('default', TEST_REPO_PATH, '', 'group/project') | false
- Gitlab::Git::Repository.new('default', 'does-not-exist.git', '', 'group/project') | true
- end
-
- with_them do
- it { expect(subject.empty?).to eq(result) }
- end
- end
-
- describe '#commit_id' do
- it 'returns an OID if the revision exists' do
- expect(subject.commit_id('v1.0.0')).to eq('6f6d7e7ed97bb5f0054f2b1df789b39ca89b6ff9')
- end
-
- it 'is nil when the revision does not exist' do
- expect(subject.commit_id('does-not-exist')).to be_nil
- end
- end
-
- describe '#branch_exists?' do
- using RSpec::Parameterized::TableSyntax
-
- where(:branch, :result) do
- 'master' | true
- 'does-not-exist' | false
- end
-
- with_them do
- it { expect(subject.branch_exists?(branch)).to eq(result) }
- end
- end
-
- describe '#same_repository?' do
- using RSpec::Parameterized::TableSyntax
-
- where(:other_repository, :result) do
- repository | true
- Gitlab::Git::Repository.new(repository.storage, repository.relative_path, '', 'group/project') | true
- Gitlab::Git::Repository.new('broken', TEST_REPO_PATH, '', 'group/project') | false
- Gitlab::Git::Repository.new(repository.storage, 'wrong/relative-path.git', '', 'group/project') | false
- Gitlab::Git::Repository.new('broken', 'wrong/relative-path.git', '', 'group/project') | false
- end
-
- with_them do
- it { expect(subject.same_repository?(other_repository)).to eq(result) }
- end
- end
-end
diff --git a/spec/lib/gitlab/git/repository_spec.rb b/spec/lib/gitlab/git/repository_spec.rb
index e20d5b928c4..a1fb8b70bd7 100644
--- a/spec/lib/gitlab/git/repository_spec.rb
+++ b/spec/lib/gitlab/git/repository_spec.rb
@@ -1252,8 +1252,8 @@ RSpec.describe Gitlab::Git::Repository, :seed_helper do
end
describe '#raw_changes_between' do
- let(:old_rev) { }
- let(:new_rev) { }
+ let(:old_rev) {}
+ let(:new_rev) {}
let(:changes) { repository.raw_changes_between(old_rev, new_rev) }
context 'initial commit' do
@@ -1837,6 +1837,47 @@ RSpec.describe Gitlab::Git::Repository, :seed_helper do
end
end
+ describe '#find_tag' do
+ it 'returns a tag' do
+ tag = repository.find_tag('v1.0.0')
+
+ expect(tag).to be_a_kind_of(Gitlab::Git::Tag)
+ expect(tag.name).to eq('v1.0.0')
+ end
+
+ shared_examples 'a nonexistent tag' do
+ it 'returns nil' do
+ expect(repository.find_tag('this-is-garbage')).to be_nil
+ end
+ end
+
+ context 'when asking for a non-existent tag' do
+ it_behaves_like 'a nonexistent tag'
+ end
+
+ context 'when Gitaly returns Internal error' do
+ before do
+ expect(repository.gitaly_ref_client)
+ .to receive(:find_tag)
+ .and_raise(GRPC::Internal, "tag not found")
+ end
+
+ it_behaves_like 'a nonexistent tag'
+ end
+
+ context 'when Gitaly returns tag_not_found error' do
+ before do
+ expect(repository.gitaly_ref_client)
+ .to receive(:find_tag)
+ .and_raise(new_detailed_error(GRPC::Core::StatusCodes::NOT_FOUND,
+ "tag was not found",
+ Gitaly::FindTagError.new(tag_not_found: Gitaly::ReferenceNotFoundError.new)))
+ end
+
+ it_behaves_like 'a nonexistent tag'
+ end
+ end
+
describe '#languages' do
it 'returns exactly the expected results' do
languages = repository.languages('4b4918a572fa86f9771e5ba40fbd48e1eb03e2c6')
@@ -2017,17 +2058,14 @@ RSpec.describe Gitlab::Git::Repository, :seed_helper do
describe '#set_full_path' do
before do
- repository_rugged.config["gitlab.fullpath"] = repository_path
+ repository.set_full_path(full_path: repository_path)
end
context 'is given a path' do
it 'writes it to disk' do
repository.set_full_path(full_path: "not-the/real-path.git")
- config = File.read(File.join(repository_path, "config"))
-
- expect(config).to include("[gitlab]")
- expect(config).to include("fullpath = not-the/real-path.git")
+ expect(repository.full_path).to eq('not-the/real-path.git')
end
end
@@ -2035,15 +2073,12 @@ RSpec.describe Gitlab::Git::Repository, :seed_helper do
it 'does not write it to disk' do
repository.set_full_path(full_path: "")
- config = File.read(File.join(repository_path, "config"))
-
- expect(config).to include("[gitlab]")
- expect(config).to include("fullpath = #{repository_path}")
+ expect(repository.full_path).to eq(repository_path)
end
end
context 'repository does not exist' do
- it 'raises NoRepository and does not call Gitaly WriteConfig' do
+ it 'raises NoRepository and does not call SetFullPath' do
repository = Gitlab::Git::Repository.new('default', 'does/not/exist.git', '', 'group/project')
expect(repository.gitaly_repository_client).not_to receive(:set_full_path)
@@ -2055,6 +2090,18 @@ RSpec.describe Gitlab::Git::Repository, :seed_helper do
end
end
+ describe '#full_path' do
+ let(:full_path) { 'some/path' }
+
+ before do
+ repository.set_full_path(full_path: full_path)
+ end
+
+ it 'returns the full path' do
+ expect(repository.full_path).to eq(full_path)
+ end
+ end
+
describe '#merge_to_ref' do
let(:repository) { mutable_repository }
let(:branch_head) { '6d394385cf567f80a8fd85055db1ab4c5295806f' }
@@ -2468,7 +2515,7 @@ RSpec.describe Gitlab::Git::Repository, :seed_helper do
end
describe '#rename' do
- let(:project) { create(:project, :repository)}
+ let(:project) { create(:project, :repository) }
let(:repository) { project.repository }
it 'moves the repository' do
diff --git a/spec/lib/gitlab/git/rugged_impl/use_rugged_spec.rb b/spec/lib/gitlab/git/rugged_impl/use_rugged_spec.rb
index b2603e099e6..03d1c125e36 100644
--- a/spec/lib/gitlab/git/rugged_impl/use_rugged_spec.rb
+++ b/spec/lib/gitlab/git/rugged_impl/use_rugged_spec.rb
@@ -58,35 +58,55 @@ RSpec.describe Gitlab::Git::RuggedImpl::UseRugged, :seed_helper do
end
end
- context 'when not running puma with multiple threads' do
- before do
- allow(subject).to receive(:running_puma_with_multiple_threads?).and_return(false)
+ context 'when skip_rugged_auto_detect feature flag is enabled' do
+ context 'when not running puma with multiple threads' do
+ before do
+ allow(subject).to receive(:running_puma_with_multiple_threads?).and_return(false)
+ stub_feature_flags(feature_flag_name => nil)
+ stub_feature_flags(skip_rugged_auto_detect: true)
+ end
+
+ it 'returns false' do
+ expect(subject.use_rugged?(repository, feature_flag_name)).to be false
+ end
end
+ end
- it 'returns true when gitaly matches disk' do
- expect(subject.use_rugged?(repository, feature_flag_name)).to be true
+ context 'when skip_rugged_auto_detect feature flag is disabled' do
+ before do
+ stub_feature_flags(skip_rugged_auto_detect: false)
end
- it 'returns false when disk access fails' do
- allow(Gitlab::GitalyClient).to receive(:storage_metadata_file_path).and_return("/fake/path/doesnt/exist")
+ context 'when not running puma with multiple threads' do
+ before do
+ allow(subject).to receive(:running_puma_with_multiple_threads?).and_return(false)
+ end
- expect(subject.use_rugged?(repository, feature_flag_name)).to be false
- end
+ it 'returns true when gitaly matches disk' do
+ expect(subject.use_rugged?(repository, feature_flag_name)).to be true
+ end
- it "returns false when gitaly doesn't match disk" do
- allow(Gitlab::GitalyClient).to receive(:storage_metadata_file_path).and_return(temp_gitaly_metadata_file)
+ it 'returns false when disk access fails' do
+ allow(Gitlab::GitalyClient).to receive(:storage_metadata_file_path).and_return("/fake/path/doesnt/exist")
- expect(subject.use_rugged?(repository, feature_flag_name)).to be_falsey
+ expect(subject.use_rugged?(repository, feature_flag_name)).to be false
+ end
- File.delete(temp_gitaly_metadata_file)
- end
+ it "returns false when gitaly doesn't match disk" do
+ allow(Gitlab::GitalyClient).to receive(:storage_metadata_file_path).and_return(temp_gitaly_metadata_file)
+
+ expect(subject.use_rugged?(repository, feature_flag_name)).to be_falsey
- it "doesn't lead to a second rpc call because gitaly client should use the cached value" do
- expect(subject.use_rugged?(repository, feature_flag_name)).to be true
+ File.delete(temp_gitaly_metadata_file)
+ end
- expect(Gitlab::GitalyClient).not_to receive(:filesystem_id)
+ it "doesn't lead to a second rpc call because gitaly client should use the cached value" do
+ expect(subject.use_rugged?(repository, feature_flag_name)).to be true
- subject.use_rugged?(repository, feature_flag_name)
+ expect(Gitlab::GitalyClient).not_to receive(:filesystem_id)
+
+ subject.use_rugged?(repository, feature_flag_name)
+ end
end
end
end
@@ -165,7 +185,7 @@ RSpec.describe Gitlab::Git::RuggedImpl::UseRugged, :seed_helper do
context 'all features are enabled' do
let(:feature_keys) { [:feature_key_1, :feature_key_2] }
- it { is_expected.to be_truthy}
+ it { is_expected.to be_truthy }
end
context 'all features are not enabled' do
diff --git a/spec/lib/gitlab/git/tag_spec.rb b/spec/lib/gitlab/git/tag_spec.rb
index 4f56595d7d2..240cf6ed46f 100644
--- a/spec/lib/gitlab/git/tag_spec.rb
+++ b/spec/lib/gitlab/git/tag_spec.rb
@@ -2,12 +2,13 @@
require "spec_helper"
-RSpec.describe Gitlab::Git::Tag, :seed_helper do
- let(:repository) { Gitlab::Git::Repository.new('default', TEST_REPO_PATH, '', 'group/project') }
+RSpec.describe Gitlab::Git::Tag do
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:repository) { project.repository.raw }
describe '#tags' do
- describe 'first tag' do
- let(:tag) { repository.tags.first }
+ describe 'unsigned tag' do
+ let(:tag) { repository.tags.detect { |t| t.name == 'v1.0.0' } }
it { expect(tag.name).to eq("v1.0.0") }
it { expect(tag.target).to eq("f4e6814c3e4e7a0de82a9e7cd20c626cc963a2f8") }
@@ -22,29 +23,13 @@ RSpec.describe Gitlab::Git::Tag, :seed_helper do
it { expect(tag.tagger.timezone).to eq("+0200") }
end
- describe 'last tag' do
- let(:tag) { repository.tags.last }
-
- it { expect(tag.name).to eq("v1.2.1") }
- it { expect(tag.target).to eq("2ac1f24e253e08135507d0830508febaaccf02ee") }
- it { expect(tag.dereferenced_target.sha).to eq("fa1b1e6c004a68b7d8763b86455da9e6b23e36d6") }
- it { expect(tag.message).to eq("Version 1.2.1") }
- it { expect(tag.has_signature?).to be_falsey }
- it { expect(tag.signature_type).to eq(:NONE) }
- it { expect(tag.signature).to be_nil }
- it { expect(tag.tagger.name).to eq("Douwe Maan") }
- it { expect(tag.tagger.email).to eq("douwe@selenight.nl") }
- it { expect(tag.tagger.date).to eq(Google::Protobuf::Timestamp.new(seconds: 1427789449)) }
- it { expect(tag.tagger.timezone).to eq("+0200") }
- end
-
describe 'signed tag' do
- let(:project) { create(:project, :repository) }
- let(:tag) { project.repository.find_tag('v1.1.1') }
+ let(:tag) { repository.tags.detect { |t| t.name == 'v1.1.1' } }
+ it { expect(tag.name).to eq("v1.1.1") }
it { expect(tag.target).to eq("8f03acbcd11c53d9c9468078f32a2622005a4841") }
it { expect(tag.dereferenced_target.sha).to eq("189a6c924013fc3fe40d6f1ec1dc20214183bc97") }
- it { expect(tag.message).to eq("x509 signed tag" + "\n" + X509Helpers::User1.signed_tag_signature.chomp) }
+ it { expect(tag.message).to eq("x509 signed tag\n" + X509Helpers::User1.signed_tag_signature.chomp) }
it { expect(tag.has_signature?).to be_truthy }
it { expect(tag.signature_type).to eq(:X509) }
it { expect(tag.signature).not_to be_nil }
@@ -54,11 +39,11 @@ RSpec.describe Gitlab::Git::Tag, :seed_helper do
it { expect(tag.tagger.timezone).to eq("+0100") }
end
- it { expect(repository.tags.size).to eq(SeedRepo::Repo::TAGS.size) }
+ it { expect(repository.tags.size).to be > 0 }
end
describe '.get_message' do
- let(:tag_ids) { %w[f4e6814c3e4e7a0de82a9e7cd20c626cc963a2f8 8a2a6eb295bb170b34c24c76c49ed0e9b2eaf34b] }
+ let(:tag_ids) { %w[f4e6814c3e4e7a0de82a9e7cd20c626cc963a2f8 8f03acbcd11c53d9c9468078f32a2622005a4841] }
subject do
tag_ids.map { |id| described_class.get_message(repository, id) }
@@ -66,7 +51,7 @@ RSpec.describe Gitlab::Git::Tag, :seed_helper do
it 'gets tag messages' do
expect(subject[0]).to eq("Release\n")
- expect(subject[1]).to eq("Version 1.1.0\n")
+ expect(subject[1]).to eq("x509 signed tag\n" + X509Helpers::User1.signed_tag_signature)
end
it 'gets messages in one batch', :request_store do
diff --git a/spec/lib/gitlab/git/tree_spec.rb b/spec/lib/gitlab/git/tree_spec.rb
index 172d7a3f27b..b520de03929 100644
--- a/spec/lib/gitlab/git/tree_spec.rb
+++ b/spec/lib/gitlab/git/tree_spec.rb
@@ -2,10 +2,11 @@
require "spec_helper"
-RSpec.describe Gitlab::Git::Tree, :seed_helper do
+RSpec.describe Gitlab::Git::Tree do
let_it_be(:user) { create(:user) }
- let(:repository) { Gitlab::Git::Repository.new('default', TEST_REPO_PATH, '', 'group/project') }
+ let(:project) { create(:project, :repository) }
+ let(:repository) { project.repository.raw }
shared_examples :repo do
subject(:tree) { Gitlab::Git::Tree.where(repository, sha, path, recursive, pagination_params) }
@@ -105,10 +106,6 @@ RSpec.describe Gitlab::Git::Tree, :seed_helper do
).newrev
end
- after do
- ensure_seeds
- end
-
it { expect(subdir_file.flat_path).to eq('files/flat/path/correct') }
end
end
diff --git a/spec/lib/gitlab/git_access_spec.rb b/spec/lib/gitlab/git_access_spec.rb
index 5ee9cf05b3e..8577cad1011 100644
--- a/spec/lib/gitlab/git_access_spec.rb
+++ b/spec/lib/gitlab/git_access_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::GitAccess do
+RSpec.describe Gitlab::GitAccess, :aggregate_failures do
include TermsHelper
include GitHelpers
include AdminModeHelper
@@ -78,9 +78,7 @@ RSpec.describe Gitlab::GitAccess do
let(:auth_result_type) { :ci }
it "doesn't block http pull" do
- aggregate_failures do
- expect { pull_access_check }.not_to raise_error
- end
+ expect { pull_access_check }.not_to raise_error
end
end
end
@@ -153,6 +151,15 @@ RSpec.describe Gitlab::GitAccess do
it 'does not block pushes with "not found"' do
expect { push_access_check }.to raise_forbidden(described_class::ERROR_MESSAGES[:auth_upload])
end
+
+ it 'logs' do
+ expect(Gitlab::AppJsonLogger).to receive(:info).with(
+ message: 'Actor was :ci',
+ project_id: project.id
+ ).once
+
+ pull_access_check
+ end
end
context 'when actor is DeployToken' do
@@ -229,9 +236,9 @@ RSpec.describe Gitlab::GitAccess do
end
context 'key is expired' do
- let(:actor) { create(:rsa_key_2048, :expired) }
+ let(:actor) { create(:deploy_key, :expired) }
- it 'does not allow expired keys', :aggregate_failures do
+ it 'does not allow expired keys' do
expect { pull_access_check }.to raise_forbidden('Your SSH key has expired.')
expect { push_access_check }.to raise_forbidden('Your SSH key has expired.')
end
@@ -242,7 +249,7 @@ RSpec.describe Gitlab::GitAccess do
stub_application_setting(rsa_key_restriction: 4096)
end
- it 'does not allow keys which are too small', :aggregate_failures do
+ it 'does not allow keys which are too small' do
expect(actor).not_to be_valid
expect { pull_access_check }.to raise_forbidden('Your SSH key must be at least 4096 bits.')
expect { push_access_check }.to raise_forbidden('Your SSH key must be at least 4096 bits.')
@@ -254,7 +261,7 @@ RSpec.describe Gitlab::GitAccess do
stub_application_setting(rsa_key_restriction: ApplicationSetting::FORBIDDEN_KEY_VALUE)
end
- it 'does not allow keys which are too small', :aggregate_failures do
+ it 'does not allow keys which are too small' do
expect(actor).not_to be_valid
expect { pull_access_check }.to raise_forbidden(/Your SSH key type is forbidden/)
expect { push_access_check }.to raise_forbidden(/Your SSH key type is forbidden/)
@@ -263,7 +270,7 @@ RSpec.describe Gitlab::GitAccess do
end
it_behaves_like '#check with a key that is not valid' do
- let(:actor) { build(:rsa_key_2048, user: user) }
+ let(:actor) { build(:deploy_key, user: user) }
end
it_behaves_like '#check with a key that is not valid' do
@@ -736,6 +743,15 @@ RSpec.describe Gitlab::GitAccess do
context 'pull code' do
it { expect { pull_access_check }.not_to raise_error }
+
+ it 'logs' do
+ expect(Gitlab::AppJsonLogger).to receive(:info).with(
+ message: 'Actor was :ci',
+ project_id: project.id
+ ).once
+
+ pull_access_check
+ end
end
end
end
@@ -1163,13 +1179,13 @@ RSpec.describe Gitlab::GitAccess do
-> { push_access_check }]
end
- it 'blocks access when the user did not accept terms', :aggregate_failures do
+ it 'blocks access when the user did not accept terms' do
actions.each do |action|
expect { action.call }.to raise_forbidden(/must accept the Terms of Service in order to perform this action/)
end
end
- it 'allows access when the user accepted the terms', :aggregate_failures do
+ it 'allows access when the user accepted the terms' do
accept_terms(user)
actions.each do |action|
diff --git a/spec/lib/gitlab/git_spec.rb b/spec/lib/gitlab/git_spec.rb
index 784d25f55c1..f359679a930 100644
--- a/spec/lib/gitlab/git_spec.rb
+++ b/spec/lib/gitlab/git_spec.rb
@@ -54,6 +54,7 @@ RSpec.describe Gitlab::Git do
with_them do
it { expect(described_class.shas_eql?(sha1, sha2)).to eq(result) }
+
it 'is commutative' do
expect(described_class.shas_eql?(sha2, sha1)).to eq(result)
end
diff --git a/spec/lib/gitlab/gitaly_client/commit_service_spec.rb b/spec/lib/gitlab/gitaly_client/commit_service_spec.rb
index d5d1bef7bff..0d591fe6c43 100644
--- a/spec/lib/gitlab/gitaly_client/commit_service_spec.rb
+++ b/spec/lib/gitlab/gitaly_client/commit_service_spec.rb
@@ -340,7 +340,7 @@ RSpec.describe Gitlab::GitalyClient::CommitService do
describe '#list_new_commits' do
let(:revisions) { [revision] }
let(:gitaly_commits) { create_list(:gitaly_commit, 3) }
- let(:expected_commits) { gitaly_commits.map { |c| Gitlab::Git::Commit.new(repository, c) }}
+ let(:expected_commits) { gitaly_commits.map { |c| Gitlab::Git::Commit.new(repository, c) } }
subject do
client.list_new_commits(revisions)
diff --git a/spec/lib/gitlab/gitaly_client/operation_service_spec.rb b/spec/lib/gitlab/gitaly_client/operation_service_spec.rb
index e04895d975f..5d854f0c9d1 100644
--- a/spec/lib/gitlab/gitaly_client/operation_service_spec.rb
+++ b/spec/lib/gitlab/gitaly_client/operation_service_spec.rb
@@ -84,37 +84,6 @@ RSpec.describe Gitlab::GitalyClient::OperationService do
subject
end
- describe '#user_merge_to_ref' do
- let(:first_parent_ref) { 'refs/heads/my-branch' }
- let(:source_sha) { 'cfe32cf61b73a0d5e9f13e774abde7ff789b1660' }
- let(:ref) { 'refs/merge-requests/x/merge' }
- let(:message) { 'validación' }
- let(:response) { Gitaly::UserMergeToRefResponse.new(commit_id: 'new-commit-id') }
-
- let(:payload) do
- { source_sha: source_sha, branch: 'branch', target_ref: ref,
- message: message, first_parent_ref: first_parent_ref, allow_conflicts: true }
- end
-
- it 'sends a user_merge_to_ref message' do
- freeze_time do
- expect_any_instance_of(Gitaly::OperationService::Stub).to receive(:user_merge_to_ref) do |_, request, options|
- expect(options).to be_kind_of(Hash)
- expect(request.to_h).to eq(
- payload.merge({
- repository: repository.gitaly_repository.to_h,
- message: message.dup.force_encoding(Encoding::ASCII_8BIT),
- user: Gitlab::Git::User.from_gitlab(user).to_gitaly.to_h,
- timestamp: { nanos: 0, seconds: Time.current.to_i }
- })
- )
- end.and_return(response)
-
- client.user_merge_to_ref(user, **payload)
- end
- end
- end
-
context "when pre_receive_error is present" do
let(:response) do
Gitaly::UserUpdateBranchResponse.new(pre_receive_error: "GitLab: something failed")
@@ -131,6 +100,37 @@ RSpec.describe Gitlab::GitalyClient::OperationService do
end
end
+ describe '#user_merge_to_ref' do
+ let(:first_parent_ref) { 'refs/heads/my-branch' }
+ let(:source_sha) { 'cfe32cf61b73a0d5e9f13e774abde7ff789b1660' }
+ let(:ref) { 'refs/merge-requests/x/merge' }
+ let(:message) { 'validación' }
+ let(:response) { Gitaly::UserMergeToRefResponse.new(commit_id: 'new-commit-id') }
+
+ let(:payload) do
+ { source_sha: source_sha, branch: 'branch', target_ref: ref,
+ message: message, first_parent_ref: first_parent_ref, allow_conflicts: true }
+ end
+
+ it 'sends a user_merge_to_ref message' do
+ freeze_time do
+ expect_any_instance_of(Gitaly::OperationService::Stub).to receive(:user_merge_to_ref) do |_, request, options|
+ expect(options).to be_kind_of(Hash)
+ expect(request.to_h).to eq(
+ payload.merge({
+ repository: repository.gitaly_repository.to_h,
+ message: message.dup.force_encoding(Encoding::ASCII_8BIT),
+ user: Gitlab::Git::User.from_gitlab(user).to_gitaly.to_h,
+ timestamp: { nanos: 0, seconds: Time.current.to_i }
+ })
+ )
+ end.and_return(response)
+
+ client.user_merge_to_ref(user, **payload)
+ end
+ end
+ end
+
describe '#user_delete_branch' do
let(:branch_name) { 'my-branch' }
let(:request) do
@@ -551,7 +551,7 @@ RSpec.describe Gitlab::GitalyClient::OperationService do
end
let(:expected_error) { Gitlab::Git::Repository::CreateTreeError }
- let(:expected_error_message) { }
+ let(:expected_error_message) {}
it_behaves_like '#user_cherry_pick with a gRPC error'
end
@@ -559,7 +559,7 @@ RSpec.describe Gitlab::GitalyClient::OperationService do
context 'when a non-detailed gRPC error is raised' do
let(:raised_error) { GRPC::Internal.new('non-detailed error') }
let(:expected_error) { GRPC::Internal }
- let(:expected_error_message) { }
+ let(:expected_error_message) {}
it_behaves_like '#user_cherry_pick with a gRPC error'
end
@@ -813,4 +813,146 @@ RSpec.describe Gitlab::GitalyClient::OperationService do
end
end
end
+
+ describe '#add_tag' do
+ let(:tag_name) { 'some-tag' }
+ let(:tag_message) { nil }
+ let(:target) { 'master' }
+
+ subject(:add_tag) do
+ client.add_tag(tag_name, user, target, tag_message)
+ end
+
+ context 'without tag message' do
+ let(:tag_name) { 'lightweight-tag' }
+
+ it 'creates a lightweight tag' do
+ tag = add_tag
+ expect(tag.name).to eq(tag_name)
+ expect(tag.message).to eq('')
+ end
+ end
+
+ context 'with tag message' do
+ let(:tag_name) { 'annotated-tag' }
+ let(:tag_message) { "tag message" }
+
+ it 'creates an annotated tag' do
+ tag = add_tag
+ expect(tag.name).to eq(tag_name)
+ expect(tag.message).to eq(tag_message)
+ end
+ end
+
+ context 'with preexisting tag' do
+ let(:tag_name) { 'v1.0.0' }
+
+ it 'raises a TagExistsError' do
+ expect { add_tag }.to raise_error(Gitlab::Git::Repository::TagExistsError)
+ end
+ end
+
+ context 'with invalid target' do
+ let(:target) { 'refs/heads/does-not-exist' }
+
+ it 'raises an InvalidRef error' do
+ expect { add_tag }.to raise_error(Gitlab::Git::Repository::InvalidRef)
+ end
+ end
+
+ context 'with pre-receive error' do
+ before do
+ expect_any_instance_of(Gitaly::OperationService::Stub)
+ .to receive(:user_create_tag)
+ .and_return(Gitaly::UserCreateTagResponse.new(pre_receive_error: "GitLab: something failed"))
+ end
+
+ it 'raises a PreReceiveError' do
+ expect { add_tag }.to raise_error(Gitlab::Git::PreReceiveError, "something failed")
+ end
+ end
+
+ context 'with internal error' do
+ before do
+ expect_any_instance_of(Gitaly::OperationService::Stub)
+ .to receive(:user_create_tag)
+ .and_raise(GRPC::Internal.new('undetailed internal error'))
+ end
+
+ it 'raises an Internal error' do
+ expect { add_tag }.to raise_error do |error|
+ expect(error).to be_a(GRPC::Internal)
+ expect(error.details).to eq('undetailed internal error')
+ end
+ end
+ end
+
+ context 'with structured errors' do
+ before do
+ expect_any_instance_of(Gitaly::OperationService::Stub)
+ .to receive(:user_create_tag)
+ .and_raise(structured_error)
+ end
+
+ context 'with ReferenceExistsError' do
+ let(:structured_error) do
+ new_detailed_error(
+ GRPC::Core::StatusCodes::ALREADY_EXISTS,
+ 'tag exists already',
+ Gitaly::UserCreateTagError.new(
+ reference_exists: Gitaly::ReferenceExistsError.new(
+ reference_name: tag_name,
+ oid: 'something'
+ )))
+ end
+
+ it 'raises a TagExistsError' do
+ expect { add_tag }.to raise_error(Gitlab::Git::Repository::TagExistsError)
+ end
+ end
+
+ context 'with AccessCheckError' do
+ let(:structured_error) do
+ new_detailed_error(
+ GRPC::Core::StatusCodes::PERMISSION_DENIED,
+ "error creating tag",
+ Gitaly::UserCreateTagError.new(
+ access_check: Gitaly::AccessCheckError.new(
+ error_message: "You are not allowed to create this tag.",
+ protocol: "web",
+ user_id: "user-15",
+ changes: "df15b32277d2c55c6c595845a87109b09c913c556 5d6e0f935ad9240655f64e883cd98fad6f9a17ee refs/tags/v1.0.0\n"
+ )))
+ end
+
+ it 'raises a PreReceiveError' do
+ expect { add_tag }.to raise_error do |error|
+ expect(error).to be_a(Gitlab::Git::PreReceiveError)
+ expect(error.message).to eq("You are not allowed to create this tag.")
+ end
+ end
+ end
+
+ context 'with CustomHookError' do
+ let(:structured_error) do
+ new_detailed_error(
+ GRPC::Core::StatusCodes::PERMISSION_DENIED,
+ "custom hook error",
+ Gitaly::UserCreateTagError.new(
+ custom_hook: Gitaly::CustomHookError.new(
+ stdout: "some stdout",
+ stderr: "GitLab: some custom hook error message",
+ hook_type: Gitaly::CustomHookError::HookType::HOOK_TYPE_PRERECEIVE
+ )))
+ end
+
+ it 'raises a PreReceiveError' do
+ expect { add_tag }.to raise_error do |error|
+ expect(error).to be_a(Gitlab::Git::PreReceiveError)
+ expect(error.message).to eq("some custom hook error message")
+ end
+ end
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/gitaly_client/ref_service_spec.rb b/spec/lib/gitlab/gitaly_client/ref_service_spec.rb
index 566bdbacf4a..277276bb1d3 100644
--- a/spec/lib/gitlab/gitaly_client/ref_service_spec.rb
+++ b/spec/lib/gitlab/gitaly_client/ref_service_spec.rb
@@ -120,6 +120,28 @@ RSpec.describe Gitlab::GitalyClient::RefService do
expect(client.find_tag('')).to be_nil
end
end
+
+ context 'when Gitaly returns an Internal error' do
+ it 'raises an Internal error' do
+ expect_any_instance_of(Gitaly::RefService::Stub)
+ .to receive(:find_tag)
+ .and_raise(GRPC::Internal.new('something went wrong'))
+
+ expect { client.find_tag('v1.0.0') }.to raise_error(GRPC::Internal)
+ end
+ end
+
+ context 'when Gitaly returns a tag_not_found error' do
+ it 'raises an UnknownRef error' do
+ expect_any_instance_of(Gitaly::RefService::Stub)
+ .to receive(:find_tag)
+ .and_raise(new_detailed_error(GRPC::Core::StatusCodes::NOT_FOUND,
+ "tag was not found",
+ Gitaly::FindTagError.new(tag_not_found: Gitaly::ReferenceNotFoundError.new)))
+
+ expect { client.find_tag('v1.0.0') }.to raise_error(Gitlab::Git::UnknownRef, 'tag does not exist: v1.0.0')
+ end
+ end
end
describe '#default_branch_name' do
@@ -286,7 +308,7 @@ RSpec.describe Gitlab::GitalyClient::RefService do
end
context 'with a invalid format error' do
- let(:invalid_refs) {['\invali.\d/1', '\.invali/d/2']}
+ let(:invalid_refs) { ['\invali.\d/1', '\.invali/d/2'] }
let(:invalid_reference_format_error) do
new_detailed_error(
GRPC::Core::StatusCodes::INVALID_ARGUMENT,
diff --git a/spec/lib/gitlab/gitaly_client/repository_service_spec.rb b/spec/lib/gitlab/gitaly_client/repository_service_spec.rb
index 39de9a65390..63d32cb906f 100644
--- a/spec/lib/gitlab/gitaly_client/repository_service_spec.rb
+++ b/spec/lib/gitlab/gitaly_client/repository_service_spec.rb
@@ -276,32 +276,12 @@ RSpec.describe Gitlab::GitalyClient::RepositoryService do
end
describe '#disconnect_alternates' do
- let(:project) { create(:project, :repository) }
- let(:repository) { project.repository }
- let(:repository_path) { File.join(TestEnv.repos_path, repository.relative_path) }
- let(:pool_repository) { create(:pool_repository) }
- let(:object_pool) { pool_repository.object_pool }
- let(:object_pool_service) { Gitlab::GitalyClient::ObjectPoolService.new(object_pool) }
-
- before do
- object_pool_service.create(repository) # rubocop:disable Rails/SaveBang
- object_pool_service.link_repository(repository)
- end
-
- it 'deletes the alternates file' do
- repository.disconnect_alternates
-
- alternates_file = File.join(repository_path, "objects", "info", "alternates")
+ it 'sends a disconnect_git_alternates message' do
+ expect_any_instance_of(Gitaly::ObjectPoolService::Stub)
+ .to receive(:disconnect_git_alternates)
+ .with(gitaly_request_with_path(storage_name, relative_path), kind_of(Hash))
- expect(File.exist?(alternates_file)).to be_falsey
- end
-
- context 'when called twice' do
- it "doesn't raise an error" do
- repository.disconnect_alternates
-
- expect { repository.disconnect_alternates }.not_to raise_error
- end
+ client.disconnect_alternates
end
end
@@ -351,4 +331,16 @@ RSpec.describe Gitlab::GitalyClient::RepositoryService do
client.set_full_path(path)
end
end
+
+ describe '#full_path' do
+ let(:path) { 'repo/path' }
+
+ it 'sends a full_path message' do
+ expect_any_instance_of(Gitaly::RepositoryService::Stub)
+ .to receive(:full_path)
+ .and_return(double(path: path))
+
+ expect(client.full_path).to eq(path)
+ end
+ end
end
diff --git a/spec/lib/gitlab/github_import/client_spec.rb b/spec/lib/gitlab/github_import/client_spec.rb
index c4d05e92633..2bd3910ad87 100644
--- a/spec/lib/gitlab/github_import/client_spec.rb
+++ b/spec/lib/gitlab/github_import/client_spec.rb
@@ -208,7 +208,7 @@ RSpec.describe Gitlab::GithubImport::Client do
expect(client).to receive(:requests_remaining?).and_return(true)
- client.with_rate_limit { }
+ client.with_rate_limit {}
end
it 'ignores rate limiting when disabled' do
diff --git a/spec/lib/gitlab/github_import/importer/events/base_importer_spec.rb b/spec/lib/gitlab/github_import/importer/events/base_importer_spec.rb
new file mode 100644
index 00000000000..41fe5fbdbbd
--- /dev/null
+++ b/spec/lib/gitlab/github_import/importer/events/base_importer_spec.rb
@@ -0,0 +1,15 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::GithubImport::Importer::Events::BaseImporter do
+ let(:project) { instance_double('Project') }
+ let(:client) { instance_double('Gitlab::GithubImport::Client') }
+ let(:issue_event) { instance_double('Gitlab::GithubImport::Representation::IssueEvent') }
+ let(:importer_class) { Class.new(described_class) }
+ let(:importer_instance) { importer_class.new(project, client) }
+
+ describe '#execute' do
+ it { expect { importer_instance.execute(issue_event) }.to raise_error(NotImplementedError) }
+ end
+end
diff --git a/spec/lib/gitlab/github_import/importer/events/changed_assignee_spec.rb b/spec/lib/gitlab/github_import/importer/events/changed_assignee_spec.rb
new file mode 100644
index 00000000000..2f6f727dc38
--- /dev/null
+++ b/spec/lib/gitlab/github_import/importer/events/changed_assignee_spec.rb
@@ -0,0 +1,95 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::GithubImport::Importer::Events::ChangedAssignee do
+ subject(:importer) { described_class.new(project, client) }
+
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:assignee) { create(:user) }
+ let_it_be(:assigner) { create(:user) }
+
+ let(:client) { instance_double('Gitlab::GithubImport::Client') }
+ let(:issue) { create(:issue, project: project) }
+
+ let(:issue_event) do
+ Gitlab::GithubImport::Representation::IssueEvent.from_json_hash(
+ 'id' => 6501124486,
+ 'actor' => { 'id' => 4, 'login' => 'alice' },
+ 'event' => event_type,
+ 'commit_id' => nil,
+ 'created_at' => '2022-04-26 18:30:53 UTC',
+ 'assigner' => { 'id' => assigner.id, 'login' => assigner.username },
+ 'assignee' => { 'id' => assignee.id, 'login' => assignee.username },
+ 'issue' => { 'number' => issue.iid }
+ )
+ end
+
+ let(:note_attrs) do
+ {
+ noteable_id: issue.id,
+ noteable_type: Issue.name,
+ project_id: project.id,
+ author_id: assigner.id,
+ system: true,
+ created_at: issue_event.created_at,
+ updated_at: issue_event.created_at
+ }.stringify_keys
+ end
+
+ let(:expected_system_note_metadata_attrs) do
+ {
+ action: "assignee",
+ created_at: issue_event.created_at,
+ updated_at: issue_event.created_at
+ }.stringify_keys
+ end
+
+ shared_examples 'new note' do
+ it 'creates expected note' do
+ expect { importer.execute(issue_event) }.to change { issue.notes.count }
+ .from(0).to(1)
+
+ expect(issue.notes.last)
+ .to have_attributes(expected_note_attrs)
+ end
+
+ it 'creates expected system note metadata' do
+ expect { importer.execute(issue_event) }.to change { SystemNoteMetadata.count }
+ .from(0).to(1)
+
+ expect(SystemNoteMetadata.last)
+ .to have_attributes(
+ expected_system_note_metadata_attrs.merge(
+ note_id: Note.last.id
+ )
+ )
+ end
+ end
+
+ describe '#execute' do
+ before do
+ allow_next_instance_of(Gitlab::GithubImport::IssuableFinder) do |finder|
+ allow(finder).to receive(:database_id).and_return(issue.id)
+ end
+ allow_next_instance_of(Gitlab::GithubImport::UserFinder) do |finder|
+ allow(finder).to receive(:find).with(assignee.id, assignee.username).and_return(assignee.id)
+ allow(finder).to receive(:find).with(assigner.id, assigner.username).and_return(assigner.id)
+ end
+ end
+
+ context 'when importing an assigned event' do
+ let(:event_type) { 'assigned' }
+ let(:expected_note_attrs) { note_attrs.merge(note: "assigned to @#{assignee.username}") }
+
+ it_behaves_like 'new note'
+ end
+
+ context 'when importing an unassigned event' do
+ let(:event_type) { 'unassigned' }
+ let(:expected_note_attrs) { note_attrs.merge(note: "unassigned @#{assigner.username}") }
+
+ it_behaves_like 'new note'
+ end
+ end
+end
diff --git a/spec/lib/gitlab/github_import/importer/events/changed_label_spec.rb b/spec/lib/gitlab/github_import/importer/events/changed_label_spec.rb
index b773598853d..e21672aa430 100644
--- a/spec/lib/gitlab/github_import/importer/events/changed_label_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/events/changed_label_spec.rb
@@ -3,23 +3,25 @@
require 'spec_helper'
RSpec.describe Gitlab::GithubImport::Importer::Events::ChangedLabel do
- subject(:importer) { described_class.new(project, user.id) }
+ subject(:importer) { described_class.new(project, client) }
let_it_be(:project) { create(:project, :repository) }
let_it_be(:user) { create(:user) }
+ let(:client) { instance_double('Gitlab::GithubImport::Client') }
let(:issue) { create(:issue, project: project) }
let!(:label) { create(:label, project: project) }
let(:issue_event) do
Gitlab::GithubImport::Representation::IssueEvent.from_json_hash(
'id' => 6501124486,
- 'actor' => { 'id' => 4, 'login' => 'alice' },
+ 'actor' => { 'id' => user.id, 'login' => user.username },
'event' => event_type,
'commit_id' => nil,
'label_title' => label.title,
'issue_db_id' => issue.id,
- 'created_at' => '2022-04-26 18:30:53 UTC'
+ 'created_at' => '2022-04-26 18:30:53 UTC',
+ 'issue' => { 'number' => issue.iid }
)
end
@@ -43,6 +45,12 @@ RSpec.describe Gitlab::GithubImport::Importer::Events::ChangedLabel do
before do
allow(Gitlab::Cache::Import::Caching).to receive(:read_integer).and_return(label.id)
+ allow_next_instance_of(Gitlab::GithubImport::IssuableFinder) do |finder|
+ allow(finder).to receive(:database_id).and_return(issue.id)
+ end
+ allow_next_instance_of(Gitlab::GithubImport::UserFinder) do |finder|
+ allow(finder).to receive(:find).with(user.id, user.username).and_return(user.id)
+ end
end
context 'when importing a labeled event' do
diff --git a/spec/lib/gitlab/github_import/importer/events/changed_milestone_spec.rb b/spec/lib/gitlab/github_import/importer/events/changed_milestone_spec.rb
new file mode 100644
index 00000000000..2687627fc23
--- /dev/null
+++ b/spec/lib/gitlab/github_import/importer/events/changed_milestone_spec.rb
@@ -0,0 +1,72 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::GithubImport::Importer::Events::ChangedMilestone do
+ subject(:importer) { described_class.new(project, client) }
+
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:user) { create(:user) }
+
+ let(:client) { instance_double('Gitlab::GithubImport::Client') }
+ let(:issue) { create(:issue, project: project) }
+ let!(:milestone) { create(:milestone, project: project) }
+
+ let(:issue_event) do
+ Gitlab::GithubImport::Representation::IssueEvent.from_json_hash(
+ 'id' => 6501124486,
+ 'actor' => { 'id' => user.id, 'login' => user.username },
+ 'event' => event_type,
+ 'commit_id' => nil,
+ 'milestone_title' => milestone.title,
+ 'issue_db_id' => issue.id,
+ 'created_at' => '2022-04-26 18:30:53 UTC',
+ 'issue' => { 'number' => issue.iid }
+ )
+ end
+
+ let(:event_attrs) do
+ {
+ user_id: user.id,
+ issue_id: issue.id,
+ milestone_id: milestone.id,
+ state: 'opened',
+ created_at: issue_event.created_at
+ }.stringify_keys
+ end
+
+ shared_examples 'new event' do
+ it 'creates a new milestone event' do
+ expect { importer.execute(issue_event) }.to change { issue.resource_milestone_events.count }
+ .from(0).to(1)
+ expect(issue.resource_milestone_events.last)
+ .to have_attributes(expected_event_attrs)
+ end
+ end
+
+ describe '#execute' do
+ before do
+ allow(Gitlab::Cache::Import::Caching).to receive(:read_integer).and_return(milestone.id)
+ allow_next_instance_of(Gitlab::GithubImport::IssuableFinder) do |finder|
+ allow(finder).to receive(:database_id).and_return(issue.id)
+ end
+ allow_next_instance_of(Gitlab::GithubImport::UserFinder) do |finder|
+ allow(finder).to receive(:find).with(user.id, user.username).and_return(user.id)
+ end
+ end
+
+ context 'when importing a milestoned event' do
+ let(:event_type) { 'milestoned' }
+ let(:expected_event_attrs) { event_attrs.merge(action: 'add') }
+
+ it_behaves_like 'new event'
+ end
+
+ context 'when importing a demilestoned event' do
+ let(:event_type) { 'demilestoned' }
+ let(:expected_event_attrs) { event_attrs.merge(action: 'remove') }
+
+ it_behaves_like 'new event'
+ end
+ end
+end
diff --git a/spec/lib/gitlab/github_import/importer/events/closed_spec.rb b/spec/lib/gitlab/github_import/importer/events/closed_spec.rb
index 116917d3e06..9a49d80a8bb 100644
--- a/spec/lib/gitlab/github_import/importer/events/closed_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/events/closed_spec.rb
@@ -3,11 +3,12 @@
require 'spec_helper'
RSpec.describe Gitlab::GithubImport::Importer::Events::Closed do
- subject(:importer) { described_class.new(project, user.id) }
+ subject(:importer) { described_class.new(project, client) }
let_it_be(:project) { create(:project, :repository) }
let_it_be(:user) { create(:user) }
+ let(:client) { instance_double('Gitlab::GithubImport::Client') }
let(:issue) { create(:issue, project: project) }
let(:commit_id) { nil }
@@ -16,11 +17,11 @@ RSpec.describe Gitlab::GithubImport::Importer::Events::Closed do
'id' => 6501124486,
'node_id' => 'CE_lADOHK9fA85If7x0zwAAAAGDf0mG',
'url' => 'https://api.github.com/repos/elhowm/test-import/issues/events/6501124486',
- 'actor' => { 'id' => 4, 'login' => 'alice' },
+ 'actor' => { 'id' => user.id, 'login' => user.username },
'event' => 'closed',
'created_at' => '2022-04-26 18:30:53 UTC',
'commit_id' => commit_id,
- 'issue_db_id' => issue.id
+ 'issue' => { 'number' => issue.iid }
)
end
@@ -45,6 +46,15 @@ RSpec.describe Gitlab::GithubImport::Importer::Events::Closed do
}.stringify_keys
end
+ before do
+ allow_next_instance_of(Gitlab::GithubImport::IssuableFinder) do |finder|
+ allow(finder).to receive(:database_id).and_return(issue.id)
+ end
+ allow_next_instance_of(Gitlab::GithubImport::UserFinder) do |finder|
+ allow(finder).to receive(:find).with(user.id, user.username).and_return(user.id)
+ end
+ end
+
it 'creates expected event and state event' do
importer.execute(issue_event)
diff --git a/spec/lib/gitlab/github_import/importer/events/cross_referenced_spec.rb b/spec/lib/gitlab/github_import/importer/events/cross_referenced_spec.rb
index 118c482a7d9..68e001c7364 100644
--- a/spec/lib/gitlab/github_import/importer/events/cross_referenced_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/events/cross_referenced_spec.rb
@@ -3,15 +3,16 @@
require 'spec_helper'
RSpec.describe Gitlab::GithubImport::Importer::Events::CrossReferenced, :clean_gitlab_redis_cache do
- subject(:importer) { described_class.new(project, user.id) }
+ subject(:importer) { described_class.new(project, client) }
let_it_be(:project) { create(:project, :repository) }
let_it_be(:user) { create(:user) }
- let(:sawyer_stub) { Struct.new(:iid, :issuable_type, keyword_init: true) }
+ let(:client) { instance_double('Gitlab::GithubImport::Client') }
- let(:issue) { create(:issue, project: project) }
- let(:referenced_in) { build_stubbed(:issue, project: project) }
+ let(:issue_iid) { 999 }
+ let(:issue) { create(:issue, project: project, iid: issue_iid) }
+ let(:referenced_in) { build_stubbed(:issue, project: project, iid: issue_iid + 1) }
let(:commit_id) { nil }
let(:issue_event) do
@@ -19,7 +20,7 @@ RSpec.describe Gitlab::GithubImport::Importer::Events::CrossReferenced, :clean_g
'id' => 6501124486,
'node_id' => 'CE_lADOHK9fA85If7x0zwAAAAGDf0mG',
'url' => 'https://api.github.com/repos/elhowm/test-import/issues/events/6501124486',
- 'actor' => { 'id' => 4, 'login' => 'alice' },
+ 'actor' => { 'id' => user.id, 'login' => user.username },
'event' => 'cross-referenced',
'source' => {
'type' => 'issue',
@@ -29,7 +30,7 @@ RSpec.describe Gitlab::GithubImport::Importer::Events::CrossReferenced, :clean_g
}
},
'created_at' => '2022-04-26 18:30:53 UTC',
- 'issue_db_id' => issue.id
+ 'issue' => { 'number' => issue.iid }
)
end
@@ -38,7 +39,7 @@ RSpec.describe Gitlab::GithubImport::Importer::Events::CrossReferenced, :clean_g
{
system: true,
noteable_type: Issue.name,
- noteable_id: issue_event.issue_db_id,
+ noteable_id: issue.id,
project_id: project.id,
author_id: user.id,
note: expected_note_body,
@@ -47,12 +48,16 @@ RSpec.describe Gitlab::GithubImport::Importer::Events::CrossReferenced, :clean_g
end
context 'when referenced in other issue' do
- let(:expected_note_body) { "mentioned in issue ##{issue.iid}" }
+ let(:expected_note_body) { "mentioned in issue ##{referenced_in.iid}" }
before do
- other_issue_resource = sawyer_stub.new(iid: referenced_in.iid, issuable_type: 'Issue')
- Gitlab::GithubImport::IssuableFinder.new(project, other_issue_resource)
- .cache_database_id(referenced_in.iid)
+ allow_next_instance_of(Gitlab::GithubImport::IssuableFinder) do |finder|
+ allow(finder).to receive(:database_id).and_return(referenced_in.iid)
+ allow(finder).to receive(:database_id).and_return(issue.id)
+ end
+ allow_next_instance_of(Gitlab::GithubImport::UserFinder) do |finder|
+ allow(finder).to receive(:find).with(user.id, user.username).and_return(user.id)
+ end
end
it 'creates expected note' do
@@ -71,10 +76,13 @@ RSpec.describe Gitlab::GithubImport::Importer::Events::CrossReferenced, :clean_g
let(:expected_note_body) { "mentioned in merge request !#{referenced_in.iid}" }
before do
- other_issue_resource =
- sawyer_stub.new(iid: referenced_in.iid, issuable_type: 'MergeRequest')
- Gitlab::GithubImport::IssuableFinder.new(project, other_issue_resource)
- .cache_database_id(referenced_in.iid)
+ allow_next_instance_of(Gitlab::GithubImport::IssuableFinder) do |finder|
+ allow(finder).to receive(:database_id).and_return(referenced_in.iid)
+ allow(finder).to receive(:database_id).and_return(issue.id)
+ end
+ allow_next_instance_of(Gitlab::GithubImport::UserFinder) do |finder|
+ allow(finder).to receive(:find).with(user.id, user.username).and_return(user.id)
+ end
end
it 'creates expected note' do
@@ -87,7 +95,7 @@ RSpec.describe Gitlab::GithubImport::Importer::Events::CrossReferenced, :clean_g
end
context 'when referenced in out of project issue/pull_request' do
- it 'creates expected note' do
+ it 'does not create expected note' do
importer.execute(issue_event)
expect(issue.notes.count).to eq 0
diff --git a/spec/lib/gitlab/github_import/importer/events/renamed_spec.rb b/spec/lib/gitlab/github_import/importer/events/renamed_spec.rb
index a8c3fbcb05d..316ea798965 100644
--- a/spec/lib/gitlab/github_import/importer/events/renamed_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/events/renamed_spec.rb
@@ -3,23 +3,24 @@
require 'spec_helper'
RSpec.describe Gitlab::GithubImport::Importer::Events::Renamed do
- subject(:importer) { described_class.new(project, user.id) }
+ subject(:importer) { described_class.new(project, client) }
let_it_be(:project) { create(:project, :repository) }
let_it_be(:user) { create(:user) }
let(:issue) { create(:issue, project: project) }
+ let(:client) { instance_double('Gitlab::GithubImport::Client') }
let(:issue_event) do
Gitlab::GithubImport::Representation::IssueEvent.from_json_hash(
'id' => 6501124486,
- 'actor' => { 'id' => 4, 'login' => 'alice' },
+ 'actor' => { 'id' => user.id, 'login' => user.username },
'event' => 'renamed',
'commit_id' => nil,
'created_at' => '2022-04-26 18:30:53 UTC',
'old_title' => 'old title',
'new_title' => 'new title',
- 'issue_db_id' => issue.id
+ 'issue' => { 'number' => issue.iid }
)
end
@@ -45,6 +46,15 @@ RSpec.describe Gitlab::GithubImport::Importer::Events::Renamed do
end
describe '#execute' do
+ before do
+ allow_next_instance_of(Gitlab::GithubImport::IssuableFinder) do |finder|
+ allow(finder).to receive(:database_id).and_return(issue.id)
+ end
+ allow_next_instance_of(Gitlab::GithubImport::UserFinder) do |finder|
+ allow(finder).to receive(:find).with(user.id, user.username).and_return(user.id)
+ end
+ end
+
it 'creates expected note' do
expect { importer.execute(issue_event) }.to change { issue.notes.count }
.from(0).to(1)
diff --git a/spec/lib/gitlab/github_import/importer/events/reopened_spec.rb b/spec/lib/gitlab/github_import/importer/events/reopened_spec.rb
index 81653b0ecdc..2461dbb9701 100644
--- a/spec/lib/gitlab/github_import/importer/events/reopened_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/events/reopened_spec.rb
@@ -3,11 +3,12 @@
require 'spec_helper'
RSpec.describe Gitlab::GithubImport::Importer::Events::Reopened, :aggregate_failures do
- subject(:importer) { described_class.new(project, user.id) }
+ subject(:importer) { described_class.new(project, client) }
let_it_be(:project) { create(:project, :repository) }
let_it_be(:user) { create(:user) }
+ let(:client) { instance_double('Gitlab::GithubImport::Client') }
let(:issue) { create(:issue, project: project) }
let(:issue_event) do
@@ -15,10 +16,10 @@ RSpec.describe Gitlab::GithubImport::Importer::Events::Reopened, :aggregate_fail
'id' => 6501124486,
'node_id' => 'CE_lADOHK9fA85If7x0zwAAAAGDf0mG',
'url' => 'https://api.github.com/repos/elhowm/test-import/issues/events/6501124486',
- 'actor' => { 'id' => 4, 'login' => 'alice' },
+ 'actor' => { 'id' => user.id, 'login' => user.username },
'event' => 'reopened',
'created_at' => '2022-04-26 18:30:53 UTC',
- 'issue_db_id' => issue.id
+ 'issue' => { 'number' => issue.iid }
)
end
@@ -42,6 +43,15 @@ RSpec.describe Gitlab::GithubImport::Importer::Events::Reopened, :aggregate_fail
}.stringify_keys
end
+ before do
+ allow_next_instance_of(Gitlab::GithubImport::IssuableFinder) do |finder|
+ allow(finder).to receive(:database_id).and_return(issue.id)
+ end
+ allow_next_instance_of(Gitlab::GithubImport::UserFinder) do |finder|
+ allow(finder).to receive(:find).with(user.id, user.username).and_return(user.id)
+ end
+ end
+
it 'creates expected event and state event' do
importer.execute(issue_event)
diff --git a/spec/lib/gitlab/github_import/importer/issue_event_importer_spec.rb b/spec/lib/gitlab/github_import/importer/issue_event_importer_spec.rb
index da32a3b3766..33d5fbf13a0 100644
--- a/spec/lib/gitlab/github_import/importer/issue_event_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/issue_event_importer_spec.rb
@@ -33,7 +33,7 @@ RSpec.describe Gitlab::GithubImport::Importer::IssueEventImporter, :clean_gitlab
specific_importer = double(importer_class.name) # rubocop:disable RSpec/VerifiedDoubles
expect(importer_class)
- .to receive(:new).with(project, user.id)
+ .to receive(:new).with(project, client)
.and_return(specific_importer)
expect(specific_importer).to receive(:execute).with(issue_event)
@@ -43,12 +43,6 @@ RSpec.describe Gitlab::GithubImport::Importer::IssueEventImporter, :clean_gitlab
describe '#execute' do
before do
- allow_next_instance_of(Gitlab::GithubImport::UserFinder) do |finder|
- allow(finder).to receive(:author_id_for)
- .with(issue_event, author_key: :actor)
- .and_return(user.id, true)
- end
-
issue_event.attributes[:issue_db_id] = issue.id
end
@@ -87,6 +81,20 @@ RSpec.describe Gitlab::GithubImport::Importer::IssueEventImporter, :clean_gitlab
Gitlab::GithubImport::Importer::Events::Renamed
end
+ context "when it's milestoned issue event" do
+ let(:event_name) { 'milestoned' }
+
+ it_behaves_like 'triggers specific event importer',
+ Gitlab::GithubImport::Importer::Events::ChangedMilestone
+ end
+
+ context "when it's demilestoned issue event" do
+ let(:event_name) { 'demilestoned' }
+
+ it_behaves_like 'triggers specific event importer',
+ Gitlab::GithubImport::Importer::Events::ChangedMilestone
+ end
+
context "when it's cross-referenced issue event" do
let(:event_name) { 'cross-referenced' }
@@ -94,6 +102,20 @@ RSpec.describe Gitlab::GithubImport::Importer::IssueEventImporter, :clean_gitlab
Gitlab::GithubImport::Importer::Events::CrossReferenced
end
+ context "when it's assigned issue event" do
+ let(:event_name) { 'assigned' }
+
+ it_behaves_like 'triggers specific event importer',
+ Gitlab::GithubImport::Importer::Events::ChangedAssignee
+ end
+
+ context "when it's unassigned issue event" do
+ let(:event_name) { 'unassigned' }
+
+ it_behaves_like 'triggers specific event importer',
+ Gitlab::GithubImport::Importer::Events::ChangedAssignee
+ end
+
context "when it's unknown issue event" do
let(:event_name) { 'fake' }
diff --git a/spec/lib/gitlab/github_import/importer/issue_events_importer_spec.rb b/spec/lib/gitlab/github_import/importer/issue_events_importer_spec.rb
new file mode 100644
index 00000000000..8d4c1b01e50
--- /dev/null
+++ b/spec/lib/gitlab/github_import/importer/issue_events_importer_spec.rb
@@ -0,0 +1,122 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::GithubImport::Importer::IssueEventsImporter do
+ subject(:importer) { described_class.new(project, client, parallel: parallel) }
+
+ let(:project) { instance_double(Project, id: 4, import_source: 'foo/bar') }
+ let(:client) { instance_double(Gitlab::GithubImport::Client) }
+
+ let(:parallel) { true }
+ let(:issue_event) do
+ struct = Struct.new(
+ :id, :node_id, :url, :actor, :event, :commit_id, :commit_url, :label, :rename, :milestone,
+ :source, :assignee, :assigner, :issue, :created_at, :performed_via_github_app,
+ keyword_init: true
+ )
+ struct.new(id: rand(10), event: 'closed', created_at: '2022-04-26 18:30:53 UTC')
+ end
+
+ describe '#parallel?' do
+ context 'when running in parallel mode' do
+ it { expect(importer).to be_parallel }
+ end
+
+ context 'when running in sequential mode' do
+ let(:parallel) { false }
+
+ it { expect(importer).not_to be_parallel }
+ end
+ end
+
+ describe '#execute' do
+ context 'when running in parallel mode' do
+ it 'imports events in parallel' do
+ expect(importer).to receive(:parallel_import)
+
+ importer.execute
+ end
+ end
+
+ context 'when running in sequential mode' do
+ let(:parallel) { false }
+
+ it 'imports events in sequence' do
+ expect(importer).to receive(:sequential_import)
+
+ importer.execute
+ end
+ end
+ end
+
+ describe '#sequential_import' do
+ let(:parallel) { false }
+
+ it 'imports each event in sequence' do
+ event_importer = instance_double(Gitlab::GithubImport::Importer::IssueEventImporter)
+
+ allow(importer).to receive(:each_object_to_import).and_yield(issue_event)
+
+ expect(Gitlab::GithubImport::Importer::IssueEventImporter)
+ .to receive(:new)
+ .with(
+ an_instance_of(Gitlab::GithubImport::Representation::IssueEvent),
+ project,
+ client
+ )
+ .and_return(event_importer)
+
+ expect(event_importer).to receive(:execute)
+
+ importer.sequential_import
+ end
+ end
+
+ describe '#parallel_import' do
+ it 'imports each event in parallel' do
+ allow(importer).to receive(:each_object_to_import).and_yield(issue_event)
+
+ expect(Gitlab::GithubImport::ImportIssueEventWorker).to receive(:bulk_perform_in).with(
+ 1.second, [
+ [project.id, an_instance_of(Hash), an_instance_of(String)]
+ ], batch_size: 1000, batch_delay: 1.minute
+ )
+
+ waiter = importer.parallel_import
+
+ expect(waiter).to be_an_instance_of(Gitlab::JobWaiter)
+ expect(waiter.jobs_remaining).to eq(1)
+ end
+ end
+
+ describe '#importer_class' do
+ it { expect(importer.importer_class).to eq Gitlab::GithubImport::Importer::IssueEventImporter }
+ end
+
+ describe '#representation_class' do
+ it { expect(importer.representation_class).to eq Gitlab::GithubImport::Representation::IssueEvent }
+ end
+
+ describe '#sidekiq_worker_class' do
+ it { expect(importer.sidekiq_worker_class).to eq Gitlab::GithubImport::ImportIssueEventWorker }
+ end
+
+ describe '#object_type' do
+ it { expect(importer.object_type).to eq :issue_event }
+ end
+
+ describe '#collection_method' do
+ it { expect(importer.collection_method).to eq :repository_issue_events }
+ end
+
+ describe '#id_for_already_imported_cache' do
+ it 'returns the ID of the given event' do
+ expect(importer.id_for_already_imported_cache(issue_event)).to eq(issue_event.id)
+ end
+ end
+
+ describe '#collection_options' do
+ it { expect(importer.collection_options).to eq({}) }
+ end
+end
diff --git a/spec/lib/gitlab/github_import/importer/issue_importer_spec.rb b/spec/lib/gitlab/github_import/importer/issue_importer_spec.rb
index 570d26cdf2d..1692aac49f2 100644
--- a/spec/lib/gitlab/github_import/importer/issue_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/issue_importer_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
RSpec.describe Gitlab::GithubImport::Importer::IssueImporter, :clean_gitlab_redis_cache do
+ let_it_be(:work_item_type_id) { ::WorkItems::Type.default_issue_type.id }
+
let(:project) { create(:project) }
let(:client) { double(:client) }
let(:user) { create(:user) }
@@ -25,7 +27,8 @@ RSpec.describe Gitlab::GithubImport::Importer::IssueImporter, :clean_gitlab_redi
author: Gitlab::GithubImport::Representation::User.new(id: 4, login: 'alice'),
created_at: created_at,
updated_at: updated_at,
- pull_request: false
+ pull_request: false,
+ work_item_type_id: work_item_type_id
)
end
@@ -116,6 +119,17 @@ RSpec.describe Gitlab::GithubImport::Importer::IssueImporter, :clean_gitlab_redi
.and_return(milestone.id)
end
+ it 'creates issues with a work item type id' do
+ allow(importer.user_finder)
+ .to receive(:author_id_for)
+ .with(issue)
+ .and_return([user.id, true])
+
+ issue_id = importer.create_issue
+
+ expect(Issue.find(issue_id).work_item_type_id).to eq(work_item_type_id)
+ end
+
context 'when the issue author could be found' do
it 'creates the issue with the found author as the issue author' do
allow(importer.user_finder)
@@ -136,7 +150,8 @@ RSpec.describe Gitlab::GithubImport::Importer::IssueImporter, :clean_gitlab_redi
milestone_id: milestone.id,
state_id: 1,
created_at: created_at,
- updated_at: updated_at
+ updated_at: updated_at,
+ work_item_type_id: work_item_type_id
},
project.issues
)
@@ -166,7 +181,8 @@ RSpec.describe Gitlab::GithubImport::Importer::IssueImporter, :clean_gitlab_redi
milestone_id: milestone.id,
state_id: 1,
created_at: created_at,
- updated_at: updated_at
+ updated_at: updated_at,
+ work_item_type_id: work_item_type_id
},
project.issues
)
diff --git a/spec/lib/gitlab/github_import/importer/lfs_objects_importer_spec.rb b/spec/lib/gitlab/github_import/importer/lfs_objects_importer_spec.rb
index 6dfd4424342..251829b83a0 100644
--- a/spec/lib/gitlab/github_import/importer/lfs_objects_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/lfs_objects_importer_spec.rb
@@ -10,7 +10,7 @@ RSpec.describe Gitlab::GithubImport::Importer::LfsObjectsImporter do
let(:lfs_attributes) do
{
- oid: 'oid',
+ oid: 'a' * 64,
size: 1,
link: 'http://www.gitlab.com/lfs_objects/oid'
}
diff --git a/spec/lib/gitlab/github_import/importer/pull_requests_importer_spec.rb b/spec/lib/gitlab/github_import/importer/pull_requests_importer_spec.rb
index c1b0f4df29a..c5846fa7a87 100644
--- a/spec/lib/gitlab/github_import/importer/pull_requests_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/pull_requests_importer_spec.rb
@@ -149,7 +149,7 @@ RSpec.describe Gitlab::GithubImport::Importer::PullRequestsImporter do
expect(importer)
.to receive(:update_repository)
- importer.each_object_to_import { }
+ importer.each_object_to_import {}
end
end
diff --git a/spec/lib/gitlab/github_import/importer/single_endpoint_issue_events_importer_spec.rb b/spec/lib/gitlab/github_import/importer/single_endpoint_issue_events_importer_spec.rb
index 087faeffe02..bb1ee79ad93 100644
--- a/spec/lib/gitlab/github_import/importer/single_endpoint_issue_events_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/single_endpoint_issue_events_importer_spec.rb
@@ -53,7 +53,7 @@ RSpec.describe Gitlab::GithubImport::Importer::SingleEndpointIssueEventsImporter
describe '#each_object_to_import', :clean_gitlab_redis_cache do
let(:issue_event) do
- struct = Struct.new(:id, :event, :created_at, :issue_db_id, keyword_init: true)
+ struct = Struct.new(:id, :event, :created_at, :issue, keyword_init: true)
struct.new(id: rand(10), event: 'closed', created_at: '2022-04-26 18:30:53 UTC')
end
@@ -81,7 +81,7 @@ RSpec.describe Gitlab::GithubImport::Importer::SingleEndpointIssueEventsImporter
counter = 0
subject.each_object_to_import do |object|
expect(object).to eq issue_event
- expect(issue_event.issue_db_id).to eq issue.id
+ expect(issue_event.issue['number']).to eq issue.iid
counter += 1
end
expect(counter).to eq 1
diff --git a/spec/lib/gitlab/github_import/issuable_finder_spec.rb b/spec/lib/gitlab/github_import/issuable_finder_spec.rb
index 3afd006109b..d550f15e8c5 100644
--- a/spec/lib/gitlab/github_import/issuable_finder_spec.rb
+++ b/spec/lib/gitlab/github_import/issuable_finder_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe Gitlab::GithubImport::IssuableFinder, :clean_gitlab_redis_cache do
let(:project) { double(:project, id: 4, group: nil) }
let(:issue) do
- double(:issue, issuable_type: MergeRequest, iid: 1)
+ double(:issue, issuable_type: MergeRequest, issuable_id: 1)
end
let(:finder) { described_class.new(project, issue) }
diff --git a/spec/lib/gitlab/github_import/parallel_scheduling_spec.rb b/spec/lib/gitlab/github_import/parallel_scheduling_spec.rb
index 999f8ffb21e..738e7c88d7d 100644
--- a/spec/lib/gitlab/github_import/parallel_scheduling_spec.rb
+++ b/spec/lib/gitlab/github_import/parallel_scheduling_spec.rb
@@ -243,7 +243,7 @@ RSpec.describe Gitlab::GithubImport::ParallelScheduling do
expect(repr_class)
.to receive(:from_api_response)
- .with(object)
+ .with(object, {})
.and_return(repr_instance)
expect(importer)
@@ -281,7 +281,7 @@ RSpec.describe Gitlab::GithubImport::ParallelScheduling do
allow(repr_class)
.to receive(:from_api_response)
- .with(object)
+ .with(object, {})
.and_return({ title: 'Foo' })
end
diff --git a/spec/lib/gitlab/github_import/representation/issue_event_spec.rb b/spec/lib/gitlab/github_import/representation/issue_event_spec.rb
index 23da8276f64..d3a98035e73 100644
--- a/spec/lib/gitlab/github_import/representation/issue_event_spec.rb
+++ b/spec/lib/gitlab/github_import/representation/issue_event_spec.rb
@@ -25,8 +25,8 @@ RSpec.describe Gitlab::GithubImport::Representation::IssueEvent do
expect(issue_event.source).to eq({ type: 'issue', id: 123456 })
end
- it 'includes the issue_db_id' do
- expect(issue_event.issue_db_id).to eq(100500)
+ it 'includes the issue data' do
+ expect(issue_event.issue).to eq({ number: 2, pull_request: pull_request })
end
context 'when actor data present' do
@@ -77,11 +77,66 @@ RSpec.describe Gitlab::GithubImport::Representation::IssueEvent do
end
end
+ context 'when milestone data is present' do
+ it 'includes the milestone_title' do
+ expect(issue_event.milestone_title).to eq('milestone title')
+ end
+ end
+
+ context 'when milestone data is empty' do
+ let(:with_milestone) { false }
+
+ it 'does not return such info' do
+ expect(issue_event.milestone_title).to eq nil
+ end
+ end
+
+ context 'when assignee and assigner data is present' do
+ it 'includes assignee and assigner details' do
+ expect(issue_event.assignee)
+ .to be_an_instance_of(Gitlab::GithubImport::Representation::User)
+ expect(issue_event.assignee.id).to eq(5)
+ expect(issue_event.assignee.login).to eq('tom')
+
+ expect(issue_event.assigner)
+ .to be_an_instance_of(Gitlab::GithubImport::Representation::User)
+ expect(issue_event.assigner.id).to eq(6)
+ expect(issue_event.assigner.login).to eq('jerry')
+ end
+ end
+
+ context 'when assignee and assigner data is empty' do
+ let(:with_assignee) { false }
+
+ it 'does not return such info' do
+ expect(issue_event.assignee).to eq nil
+ expect(issue_event.assigner).to eq nil
+ end
+ end
+
it 'includes the created timestamp' do
expect(issue_event.created_at).to eq('2022-04-26 18:30:53 UTC')
end
end
+ describe '#issuable_id' do
+ it 'returns issuable_id' do
+ expect(issue_event.issuable_id).to eq(2)
+ end
+ end
+
+ describe '#issuable_type' do
+ context 'when event related to issue' do
+ it { expect(issue_event.issuable_type).to eq('Issue') }
+ end
+
+ context 'when event related to pull request' do
+ let(:pull_request) { { url: FFaker::Internet.http_url } }
+
+ it { expect(issue_event.issuable_type).to eq('MergeRequest') }
+ end
+ end
+
describe '#github_identifiers' do
it 'returns a hash with needed identifiers' do
expect(issue_event.github_identifiers).to eq({ id: 6501124486 })
@@ -92,8 +147,8 @@ RSpec.describe Gitlab::GithubImport::Representation::IssueEvent do
describe '.from_api_response' do
let(:response) do
event_resource = Struct.new(
- :id, :node_id, :url, :actor, :event, :commit_id, :commit_url, :label,
- :rename, :issue_db_id, :created_at, :performed_via_github_app, :source,
+ :id, :node_id, :url, :actor, :event, :commit_id, :commit_url, :label, :rename, :milestone,
+ :source, :assignee, :assigner, :issue, :created_at, :performed_via_github_app,
keyword_init: true
)
user_resource = Struct.new(:id, :login, keyword_init: true)
@@ -106,10 +161,13 @@ RSpec.describe Gitlab::GithubImport::Representation::IssueEvent do
commit_id: '570e7b2abdd848b95f2f578043fc23bd6f6fd24d',
commit_url: 'https://api.github.com/repos/octocat/Hello-World/commits'\
'/570e7b2abdd848b95f2f578043fc23bd6f6fd24d',
+ label: with_label ? { name: 'label title' } : nil,
rename: with_rename ? { from: 'old title', to: 'new title' } : nil,
+ milestone: with_milestone ? { title: 'milestone title' } : nil,
source: { type: 'issue', id: 123456 },
- issue_db_id: 100500,
- label: with_label ? { name: 'label title' } : nil,
+ assignee: with_assignee ? user_resource.new(id: 5, login: 'tom') : nil,
+ assigner: with_assignee ? user_resource.new(id: 6, login: 'jerry') : nil,
+ issue: { 'number' => 2, 'pull_request' => pull_request },
created_at: '2022-04-26 18:30:53 UTC',
performed_via_github_app: nil
)
@@ -118,6 +176,9 @@ RSpec.describe Gitlab::GithubImport::Representation::IssueEvent do
let(:with_actor) { true }
let(:with_label) { true }
let(:with_rename) { true }
+ let(:with_milestone) { true }
+ let(:with_assignee) { true }
+ let(:pull_request) { nil }
it_behaves_like 'an IssueEvent' do
let(:issue_event) { described_class.from_api_response(response) }
@@ -139,8 +200,11 @@ RSpec.describe Gitlab::GithubImport::Representation::IssueEvent do
'label_title' => (with_label ? 'label title' : nil),
'old_title' => with_rename ? 'old title' : nil,
'new_title' => with_rename ? 'new title' : nil,
+ 'milestone_title' => (with_milestone ? 'milestone title' : nil),
'source' => { 'type' => 'issue', 'id' => 123456 },
- "issue_db_id" => 100500,
+ 'assignee' => (with_assignee ? { 'id' => 5, 'login' => 'tom' } : nil),
+ 'assigner' => (with_assignee ? { 'id' => 6, 'login' => 'jerry' } : nil),
+ 'issue' => { 'number' => 2, 'pull_request' => pull_request },
'created_at' => '2022-04-26 18:30:53 UTC',
'performed_via_github_app' => nil
}
@@ -149,6 +213,9 @@ RSpec.describe Gitlab::GithubImport::Representation::IssueEvent do
let(:with_actor) { true }
let(:with_label) { true }
let(:with_rename) { true }
+ let(:with_milestone) { true }
+ let(:with_assignee) { true }
+ let(:pull_request) { nil }
let(:issue_event) { described_class.from_json_hash(hash) }
end
diff --git a/spec/lib/gitlab/github_import/representation/issue_spec.rb b/spec/lib/gitlab/github_import/representation/issue_spec.rb
index f3052efea70..5898518343a 100644
--- a/spec/lib/gitlab/github_import/representation/issue_spec.rb
+++ b/spec/lib/gitlab/github_import/representation/issue_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
RSpec.describe Gitlab::GithubImport::Representation::Issue do
+ let_it_be(:work_item_type_id) { ::WorkItems::Type.default_issue_type.id }
+
let(:created_at) { Time.new(2017, 1, 1, 12, 00) }
let(:updated_at) { Time.new(2017, 1, 1, 12, 15) }
@@ -60,6 +62,10 @@ RSpec.describe Gitlab::GithubImport::Representation::Issue do
expect(issue.updated_at).to eq(updated_at)
end
+ it 'includes the work_item_type_id' do
+ expect(issue.work_item_type_id).to eq(work_item_type_id)
+ end
+
it 'is not a pull request' do
expect(issue.pull_request?).to eq(false)
end
@@ -84,8 +90,10 @@ RSpec.describe Gitlab::GithubImport::Representation::Issue do
)
end
+ let(:additional_data) { { work_item_type_id: work_item_type_id } }
+
it_behaves_like 'an Issue' do
- let(:issue) { described_class.from_api_response(response) }
+ let(:issue) { described_class.from_api_response(response, additional_data) }
end
it 'does not set the user if the response did not include a user' do
@@ -93,7 +101,7 @@ RSpec.describe Gitlab::GithubImport::Representation::Issue do
.to receive(:user)
.and_return(nil)
- issue = described_class.from_api_response(response)
+ issue = described_class.from_api_response(response, additional_data)
expect(issue.author).to be_nil
end
@@ -113,7 +121,8 @@ RSpec.describe Gitlab::GithubImport::Representation::Issue do
'author' => { 'id' => 4, 'login' => 'alice' },
'created_at' => created_at.to_s,
'updated_at' => updated_at.to_s,
- 'pull_request' => false
+ 'pull_request' => false,
+ 'work_item_type_id' => work_item_type_id
}
end
diff --git a/spec/lib/gitlab/github_import/user_finder_spec.rb b/spec/lib/gitlab/github_import/user_finder_spec.rb
index 8eb6eedd72d..d85e298785c 100644
--- a/spec/lib/gitlab/github_import/user_finder_spec.rb
+++ b/spec/lib/gitlab/github_import/user_finder_spec.rb
@@ -15,32 +15,64 @@ RSpec.describe Gitlab::GithubImport::UserFinder, :clean_gitlab_redis_cache do
let(:finder) { described_class.new(project, client) }
describe '#author_id_for' do
- it 'returns the user ID for the author of an object' do
- user = double(:user, id: 4, login: 'kittens')
- note = double(:note, author: user)
+ context 'with default author_key' do
+ it 'returns the user ID for the author of an object' do
+ user = double(:user, id: 4, login: 'kittens')
+ note = double(:note, author: user)
- expect(finder).to receive(:user_id_for).with(user).and_return(42)
+ expect(finder).to receive(:user_id_for).with(user).and_return(42)
- expect(finder.author_id_for(note)).to eq([42, true])
- end
+ expect(finder.author_id_for(note)).to eq([42, true])
+ end
- it 'returns the ID of the project creator if no user ID could be found' do
- user = double(:user, id: 4, login: 'kittens')
- note = double(:note, author: user)
+ it 'returns the ID of the project creator if no user ID could be found' do
+ user = double(:user, id: 4, login: 'kittens')
+ note = double(:note, author: user)
- expect(finder).to receive(:user_id_for).with(user).and_return(nil)
+ expect(finder).to receive(:user_id_for).with(user).and_return(nil)
- expect(finder.author_id_for(note)).to eq([project.creator_id, false])
- end
+ expect(finder.author_id_for(note)).to eq([project.creator_id, false])
+ end
+
+ it 'returns the ID of the ghost user when the object has no user' do
+ note = double(:note, author: nil)
- it 'returns the ID of the ghost user when the object has no user' do
- note = double(:note, author: nil)
+ expect(finder.author_id_for(note)).to eq([User.ghost.id, true])
+ end
- expect(finder.author_id_for(note)).to eq([User.ghost.id, true])
+ it 'returns the ID of the ghost user when the given object is nil' do
+ expect(finder.author_id_for(nil)).to eq([User.ghost.id, true])
+ end
end
- it 'returns the ID of the ghost user when the given object is nil' do
- expect(finder.author_id_for(nil)).to eq([User.ghost.id, true])
+ context 'with a non-default author_key' do
+ let(:user) { double(:user, id: 4, login: 'kittens') }
+
+ shared_examples 'user ID finder' do |author_key|
+ it 'returns the user ID for an object' do
+ expect(finder).to receive(:user_id_for).with(user).and_return(42)
+
+ expect(finder.author_id_for(issue_event, author_key: author_key)).to eq([42, true])
+ end
+ end
+
+ context 'when the author_key parameter is :actor' do
+ let(:issue_event) { double('Gitlab::GithubImport::Representation::IssueEvent', actor: user) }
+
+ it_behaves_like 'user ID finder', :actor
+ end
+
+ context 'when the author_key parameter is :assignee' do
+ let(:issue_event) { double('Gitlab::GithubImport::Representation::IssueEvent', assignee: user) }
+
+ it_behaves_like 'user ID finder', :assignee
+ end
+
+ context 'when the author_key parameter is :assigner' do
+ let(:issue_event) { double('Gitlab::GithubImport::Representation::IssueEvent', assigner: user) }
+
+ it_behaves_like 'user ID finder', :assigner
+ end
end
end
diff --git a/spec/lib/gitlab/global_id/deprecations_spec.rb b/spec/lib/gitlab/global_id/deprecations_spec.rb
index 22a4766c0a0..3824473c95b 100644
--- a/spec/lib/gitlab/global_id/deprecations_spec.rb
+++ b/spec/lib/gitlab/global_id/deprecations_spec.rb
@@ -1,12 +1,21 @@
# frozen_string_literal: true
-require 'spec_helper'
+require 'fast_spec_helper'
+require 'graphql'
+require_relative '../../../../app/graphql/types/base_scalar'
+require_relative '../../../../app/graphql/types/global_id_type'
+require_relative '../../../support/helpers/global_id_deprecation_helpers'
RSpec.describe Gitlab::GlobalId::Deprecations do
include GlobalIDDeprecationHelpers
- let_it_be(:deprecation_1) { described_class::Deprecation.new(old_model_name: 'Foo::Model', new_model_name: 'Bar', milestone: '9.0') }
- let_it_be(:deprecation_2) { described_class::Deprecation.new(old_model_name: 'Baz', new_model_name: 'Qux::Model', milestone: '10.0') }
+ let(:deprecation_1) do
+ described_class::NameDeprecation.new(old_name: 'Foo::Model', new_name: 'Bar', milestone: '9.0')
+ end
+
+ let(:deprecation_2) do
+ described_class::NameDeprecation.new(old_name: 'Baz', new_name: 'Qux::Model', milestone: '10.0')
+ end
before do
stub_global_id_deprecations(deprecation_1, deprecation_2)
diff --git a/spec/lib/gitlab/gpg_spec.rb b/spec/lib/gitlab/gpg_spec.rb
index 72c6c8efb5e..e64555f1079 100644
--- a/spec/lib/gitlab/gpg_spec.rb
+++ b/spec/lib/gitlab/gpg_spec.rb
@@ -218,7 +218,7 @@ RSpec.describe Gitlab::Gpg do
expect(Retriable).to receive(:sleep).at_least(:twice)
expect(FileUtils).to receive(:remove_entry).with(tmp_dir).at_least(:twice).and_raise('Deletion failed')
- expect { described_class.using_tmp_keychain { } }.to raise_error(described_class::CleanupError)
+ expect { described_class.using_tmp_keychain {} }.to raise_error(described_class::CleanupError)
end
it 'does not attempt multiple times when the deletion succeeds' do
@@ -226,7 +226,7 @@ RSpec.describe Gitlab::Gpg do
expect(FileUtils).to receive(:remove_entry).with(tmp_dir).once.and_raise('Deletion failed')
expect(FileUtils).to receive(:remove_entry).with(tmp_dir).and_call_original
- expect { described_class.using_tmp_keychain { } }.not_to raise_error
+ expect { described_class.using_tmp_keychain {} }.not_to raise_error
expect(File.exist?(tmp_dir)).to be false
end
diff --git a/spec/lib/gitlab/grape_logging/loggers/token_logger_spec.rb b/spec/lib/gitlab/grape_logging/loggers/token_logger_spec.rb
new file mode 100644
index 00000000000..d2022a28a90
--- /dev/null
+++ b/spec/lib/gitlab/grape_logging/loggers/token_logger_spec.rb
@@ -0,0 +1,34 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::GrapeLogging::Loggers::TokenLogger do
+ subject { described_class.new }
+
+ describe ".parameters" do
+ let(:token_id) { 1 }
+ let(:token_type) { "PersonalAccessToken" }
+
+ describe 'when no token information is available' do
+ let(:mock_request) { instance_double(ActionDispatch::Request, 'env', env: {}) }
+
+ it 'returns an empty hash' do
+ expect(subject.parameters(mock_request, nil)).to eq({})
+ end
+ end
+
+ describe 'when token information is available' do
+ let(:mock_request) do
+ instance_double(ActionDispatch::Request, 'env',
+ env: {
+ 'gitlab.api.token' => { 'token_id': token_id, 'token_type': token_type }
+ }
+ )
+ end
+
+ it 'adds the token information to log parameters' do
+ expect(subject.parameters(mock_request, nil)).to eq( { 'token_id': 1, 'token_type': "PersonalAccessToken" })
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/graphql/deprecation_spec.rb b/spec/lib/gitlab/graphql/deprecation_spec.rb
index 2931e28a6ee..c9b47219198 100644
--- a/spec/lib/gitlab/graphql/deprecation_spec.rb
+++ b/spec/lib/gitlab/graphql/deprecation_spec.rb
@@ -6,30 +6,57 @@ require 'active_model'
RSpec.describe ::Gitlab::Graphql::Deprecation do
let(:options) { {} }
- subject(:deprecation) { described_class.parse(options) }
+ subject(:deprecation) { described_class.new(**options) }
describe '.parse' do
- context 'with nil' do
- let(:options) { nil }
+ subject(:parsed_deprecation) { described_class.parse(**options) }
- it 'parses to nil' do
- expect(deprecation).to be_nil
+ context 'with no arguments' do
+ it 'returns nil' do
+ expect(parsed_deprecation).to be_nil
end
end
- context 'with empty options' do
- let(:options) { {} }
+ context 'with an incomplete `deprecated` argument' do
+ let(:options) { { deprecated: {} } }
- it 'parses to an empty deprecation' do
- expect(deprecation).to eq(described_class.new)
+ it 'parses as an invalid deprecation' do
+ expect(parsed_deprecation).not_to be_valid
+ expect(parsed_deprecation).to eq(described_class.new)
end
end
- context 'with defined options' do
- let(:options) { { reason: :renamed, milestone: '10.10' } }
+ context 'with a `deprecated` argument' do
+ let(:options) { { deprecated: { reason: :renamed, milestone: '10.10' } } }
+
+ it 'parses as a deprecation' do
+ expect(parsed_deprecation).to be_valid
+ expect(parsed_deprecation).to eq(
+ described_class.new(reason: 'This was renamed', milestone: '10.10')
+ )
+ end
+ end
+
+ context 'with an `alpha` argument' do
+ let(:options) { { alpha: { milestone: '10.10' } } }
+
+ it 'parses as an alpha' do
+ expect(parsed_deprecation).to be_valid
+ expect(parsed_deprecation).to eq(
+ described_class.new(reason: :alpha, milestone: '10.10')
+ )
+ end
+ end
+
+ context 'with both `deprecated` and `alpha` arguments' do
+ let(:options) do
+ { alpha: { milestone: '10.10' }, deprecated: { reason: :renamed, milestone: '10.10' } }
+ end
- it 'assigns the properties' do
- expect(deprecation).to eq(described_class.new(reason: 'This was renamed', milestone: '10.10'))
+ it 'raises an error' do
+ expect { parsed_deprecation }.to raise_error(ArgumentError,
+ '`alpha` and `deprecated` arguments cannot be passed at the same time'
+ )
end
end
end
@@ -210,4 +237,20 @@ RSpec.describe ::Gitlab::Graphql::Deprecation do
end
end
end
+
+ describe '#alpha?' do
+ let(:options) { { milestone: '10.10', reason: reason } }
+
+ context 'when `reason` is `:alpha`' do
+ let(:reason) { described_class::REASON_ALPHA }
+
+ it { is_expected.to be_alpha }
+ end
+
+ context 'when `reason` is not `:alpha`' do
+ let(:reason) { described_class::REASON_RENAMED }
+
+ it { is_expected.not_to be_alpha }
+ end
+ end
end
diff --git a/spec/lib/gitlab/graphql/pagination/keyset/conditions/not_null_condition_spec.rb b/spec/lib/gitlab/graphql/pagination/keyset/conditions/not_null_condition_spec.rb
deleted file mode 100644
index eecdaa3409f..00000000000
--- a/spec/lib/gitlab/graphql/pagination/keyset/conditions/not_null_condition_spec.rb
+++ /dev/null
@@ -1,115 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::Graphql::Pagination::Keyset::Conditions::NotNullCondition do
- describe '#build' do
- let(:operators) { ['>', '>'] }
- let(:before_or_after) { :after }
- let(:condition) { described_class.new(arel_table, order_list, values, operators, before_or_after) }
-
- context 'when there is only one ordering field' do
- let(:arel_table) { Issue.arel_table }
- let(:order_list) { [double(named_function: nil, attribute_name: 'id')] }
- let(:values) { [500] }
- let(:operators) { ['>'] }
-
- it 'generates a single condition sql' do
- expected_sql = <<~SQL
- ("issues"."id" > 500)
- SQL
-
- expect(condition.build.squish).to eq expected_sql.squish
- end
- end
-
- context 'when ordering by a column attribute' do
- let(:arel_table) { Issue.arel_table }
- let(:order_list) { [double(named_function: nil, attribute_name: 'relative_position'), double(named_function: nil, attribute_name: 'id')] }
- let(:values) { [1500, 500] }
-
- shared_examples ':after condition' do
- it 'generates :after sql' do
- expected_sql = <<~SQL
- ("issues"."relative_position" > 1500)
- OR (
- "issues"."relative_position" = 1500
- AND
- "issues"."id" > 500
- )
- OR ("issues"."relative_position" IS NULL)
- SQL
-
- expect(condition.build.squish).to eq expected_sql.squish
- end
- end
-
- context 'when :after' do
- it_behaves_like ':after condition'
- end
-
- context 'when :before' do
- let(:before_or_after) { :before }
-
- it 'generates :before sql' do
- expected_sql = <<~SQL
- ("issues"."relative_position" > 1500)
- OR (
- "issues"."relative_position" = 1500
- AND
- "issues"."id" > 500
- )
- SQL
-
- expect(condition.build.squish).to eq expected_sql.squish
- end
- end
-
- context 'when :foo' do
- let(:before_or_after) { :foo }
-
- it_behaves_like ':after condition'
- end
- end
-
- context 'when ordering by LOWER' do
- let(:arel_table) { Project.arel_table }
- let(:relation) { Project.order(arel_table['name'].lower.asc).order(:id) }
- let(:order_list) { Gitlab::Graphql::Pagination::Keyset::OrderInfo.build_order_list(relation) }
- let(:values) { ['Test', 500] }
-
- context 'when :after' do
- it 'generates :after sql' do
- expected_sql = <<~SQL
- (LOWER("projects"."name") > 'test')
- OR (
- LOWER("projects"."name") = 'test'
- AND
- "projects"."id" > 500
- )
- OR (LOWER("projects"."name") IS NULL)
- SQL
-
- expect(condition.build.squish).to eq expected_sql.squish
- end
- end
-
- context 'when :before' do
- let(:before_or_after) { :before }
-
- it 'generates :before sql' do
- expected_sql = <<~SQL
- (LOWER("projects"."name") > 'test')
- OR (
- LOWER("projects"."name") = 'test'
- AND
- "projects"."id" > 500
- )
- SQL
-
- expect(condition.build.squish).to eq expected_sql.squish
- end
- end
- end
- end
-end
diff --git a/spec/lib/gitlab/graphql/pagination/keyset/conditions/null_condition_spec.rb b/spec/lib/gitlab/graphql/pagination/keyset/conditions/null_condition_spec.rb
deleted file mode 100644
index 582f96299ec..00000000000
--- a/spec/lib/gitlab/graphql/pagination/keyset/conditions/null_condition_spec.rb
+++ /dev/null
@@ -1,95 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::Graphql::Pagination::Keyset::Conditions::NullCondition do
- describe '#build' do
- let(:values) { [nil, 500] }
- let(:operators) { [nil, '>'] }
- let(:before_or_after) { :after }
- let(:condition) { described_class.new(arel_table, order_list, values, operators, before_or_after) }
-
- context 'when ordering by a column attribute' do
- let(:arel_table) { Issue.arel_table }
- let(:order_list) { [double(named_function: nil, attribute_name: 'relative_position'), double(named_function: nil, attribute_name: 'id')] }
-
- shared_examples ':after condition' do
- it 'generates sql' do
- expected_sql = <<~SQL
- (
- "issues"."relative_position" IS NULL
- AND
- "issues"."id" > 500
- )
- SQL
-
- expect(condition.build.squish).to eq expected_sql.squish
- end
- end
-
- context 'when :after' do
- it_behaves_like ':after condition'
- end
-
- context 'when :before' do
- let(:before_or_after) { :before }
-
- it 'generates :before sql' do
- expected_sql = <<~SQL
- (
- "issues"."relative_position" IS NULL
- AND
- "issues"."id" > 500
- )
- OR ("issues"."relative_position" IS NOT NULL)
- SQL
-
- expect(condition.build.squish).to eq expected_sql.squish
- end
- end
-
- context 'when :foo' do
- let(:before_or_after) { :foo }
-
- it_behaves_like ':after condition'
- end
- end
-
- context 'when ordering by LOWER' do
- let(:arel_table) { Project.arel_table }
- let(:relation) { Project.order(arel_table['name'].lower.asc).order(:id) }
- let(:order_list) { Gitlab::Graphql::Pagination::Keyset::OrderInfo.build_order_list(relation) }
-
- context 'when :after' do
- it 'generates sql' do
- expected_sql = <<~SQL
- (
- LOWER("projects"."name") IS NULL
- AND
- "projects"."id" > 500
- )
- SQL
-
- expect(condition.build.squish).to eq expected_sql.squish
- end
- end
-
- context 'when :before' do
- let(:before_or_after) { :before }
-
- it 'generates :before sql' do
- expected_sql = <<~SQL
- (
- LOWER("projects"."name") IS NULL
- AND
- "projects"."id" > 500
- )
- OR (LOWER("projects"."name") IS NOT NULL)
- SQL
-
- expect(condition.build.squish).to eq expected_sql.squish
- end
- end
- end
- end
-end
diff --git a/spec/lib/gitlab/graphql/pagination/keyset/connection_generic_keyset_spec.rb b/spec/lib/gitlab/graphql/pagination/keyset/connection_generic_keyset_spec.rb
deleted file mode 100644
index 8a2b5ae0d38..00000000000
--- a/spec/lib/gitlab/graphql/pagination/keyset/connection_generic_keyset_spec.rb
+++ /dev/null
@@ -1,415 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::Graphql::Pagination::Keyset::Connection do
- include GraphqlHelpers
-
- # https://gitlab.com/gitlab-org/gitlab/-/issues/334973
- # The spec will be merged with connection_spec.rb in the future.
- let(:nodes) { Project.all.order(id: :asc) }
- let(:arguments) { {} }
- let(:context) { GraphQL::Query::Context.new(query: query_double, values: nil, object: nil) }
-
- let_it_be(:column_order_id) { Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(attribute_name: 'id', order_expression: Project.arel_table[:id].asc) }
- let_it_be(:column_order_id_desc) { Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(attribute_name: 'id', order_expression: Project.arel_table[:id].desc) }
- let_it_be(:column_order_updated_at) { Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(attribute_name: 'updated_at', order_expression: Project.arel_table[:updated_at].asc) }
- let_it_be(:column_order_created_at) { Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(attribute_name: 'created_at', order_expression: Project.arel_table[:created_at].asc) }
- let_it_be(:column_order_last_repo) do
- Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
- attribute_name: 'last_repository_check_at',
- column_expression: Project.arel_table[:last_repository_check_at],
- order_expression: Project.arel_table[:last_repository_check_at].asc.nulls_last,
- reversed_order_expression: Project.arel_table[:last_repository_check_at].desc.nulls_last,
- order_direction: :asc,
- nullable: :nulls_last,
- distinct: false)
- end
-
- let_it_be(:column_order_last_repo_desc) do
- Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
- attribute_name: 'last_repository_check_at',
- column_expression: Project.arel_table[:last_repository_check_at],
- order_expression: Project.arel_table[:last_repository_check_at].desc.nulls_last,
- reversed_order_expression: Project.arel_table[:last_repository_check_at].asc.nulls_last,
- order_direction: :desc,
- nullable: :nulls_last,
- distinct: false)
- end
-
- subject(:connection) do
- described_class.new(nodes, **{ context: context, max_page_size: 3 }.merge(arguments))
- end
-
- def encoded_cursor(node)
- described_class.new(nodes, context: context).cursor_for(node)
- end
-
- def decoded_cursor(cursor)
- Gitlab::Json.parse(Base64Bp.urlsafe_decode64(cursor))
- end
-
- describe "With generic keyset order support" do
- let(:nodes) { Project.all.order(Gitlab::Pagination::Keyset::Order.build([column_order_id])) }
-
- it_behaves_like 'a connection with collection methods'
-
- it_behaves_like 'a redactable connection' do
- let_it_be(:projects) { create_list(:project, 2) }
- let(:unwanted) { projects.second }
- end
-
- describe '#cursor_for' do
- let(:project) { create(:project) }
- let(:cursor) { connection.cursor_for(project) }
-
- it 'returns an encoded ID' do
- expect(decoded_cursor(cursor)).to eq('id' => project.id.to_s)
- end
-
- context 'when an order is specified' do
- let(:nodes) { Project.all.order(Gitlab::Pagination::Keyset::Order.build([column_order_id])) }
-
- it 'returns the encoded value of the order' do
- expect(decoded_cursor(cursor)).to include('id' => project.id.to_s)
- end
- end
-
- context 'when multiple orders are specified' do
- let(:nodes) { Project.all.order(Gitlab::Pagination::Keyset::Order.build([column_order_updated_at, column_order_created_at, column_order_id])) }
-
- it 'returns the encoded value of the order' do
- expect(decoded_cursor(cursor)).to include('updated_at' => project.updated_at.to_s(:inspect))
- end
- end
- end
-
- describe '#sliced_nodes' do
- let(:projects) { create_list(:project, 4) }
-
- context 'when before is passed' do
- let(:arguments) { { before: encoded_cursor(projects[1]) } }
-
- it 'only returns the project before the selected one' do
- expect(subject.sliced_nodes).to contain_exactly(projects.first)
- end
-
- context 'when the sort order is descending' do
- let(:nodes) { Project.all.order(Gitlab::Pagination::Keyset::Order.build([column_order_id_desc])) }
-
- it 'returns the correct nodes' do
- expect(subject.sliced_nodes).to contain_exactly(*projects[2..])
- end
- end
- end
-
- context 'when after is passed' do
- let(:arguments) { { after: encoded_cursor(projects[1]) } }
-
- it 'only returns the project before the selected one' do
- expect(subject.sliced_nodes).to contain_exactly(*projects[2..])
- end
-
- context 'when the sort order is descending' do
- let(:nodes) { Project.all.order(Gitlab::Pagination::Keyset::Order.build([column_order_id_desc])) }
-
- it 'returns the correct nodes' do
- expect(subject.sliced_nodes).to contain_exactly(projects.first)
- end
- end
- end
-
- context 'when both before and after are passed' do
- let(:arguments) do
- {
- after: encoded_cursor(projects[1]),
- before: encoded_cursor(projects[3])
- }
- end
-
- it 'returns the expected set' do
- expect(subject.sliced_nodes).to contain_exactly(projects[2])
- end
- end
-
- shared_examples 'nodes are in ascending order' do
- context 'when no cursor is passed' do
- let(:arguments) { {} }
-
- it 'returns projects in ascending order' do
- expect(subject.sliced_nodes).to eq(ascending_nodes)
- end
- end
-
- context 'when before cursor value is not NULL' do
- let(:arguments) { { before: encoded_cursor(ascending_nodes[2]) } }
-
- it 'returns all projects before the cursor' do
- expect(subject.sliced_nodes).to eq(ascending_nodes.first(2))
- end
- end
-
- context 'when after cursor value is not NULL' do
- let(:arguments) { { after: encoded_cursor(ascending_nodes[1]) } }
-
- it 'returns all projects after the cursor' do
- expect(subject.sliced_nodes).to eq(ascending_nodes.last(3))
- end
- end
-
- context 'when before and after cursor' do
- let(:arguments) { { before: encoded_cursor(ascending_nodes.last), after: encoded_cursor(ascending_nodes.first) } }
-
- it 'returns all projects after the cursor' do
- expect(subject.sliced_nodes).to eq(ascending_nodes[1..3])
- end
- end
- end
-
- shared_examples 'nodes are in descending order' do
- context 'when no cursor is passed' do
- let(:arguments) { {} }
-
- it 'only returns projects in descending order' do
- expect(subject.sliced_nodes).to eq(descending_nodes)
- end
- end
-
- context 'when before cursor value is not NULL' do
- let(:arguments) { { before: encoded_cursor(descending_nodes[2]) } }
-
- it 'returns all projects before the cursor' do
- expect(subject.sliced_nodes).to eq(descending_nodes.first(2))
- end
- end
-
- context 'when after cursor value is not NULL' do
- let(:arguments) { { after: encoded_cursor(descending_nodes[1]) } }
-
- it 'returns all projects after the cursor' do
- expect(subject.sliced_nodes).to eq(descending_nodes.last(3))
- end
- end
-
- context 'when before and after cursor' do
- let(:arguments) { { before: encoded_cursor(descending_nodes.last), after: encoded_cursor(descending_nodes.first) } }
-
- it 'returns all projects after the cursor' do
- expect(subject.sliced_nodes).to eq(descending_nodes[1..3])
- end
- end
- end
-
- context 'when multiple orders with nil values are defined' do
- let_it_be(:project1) { create(:project, last_repository_check_at: 10.days.ago) } # Asc: project5 Desc: project3
- let_it_be(:project2) { create(:project, last_repository_check_at: nil) } # Asc: project1 Desc: project1
- let_it_be(:project3) { create(:project, last_repository_check_at: 5.days.ago) } # Asc: project3 Desc: project5
- let_it_be(:project4) { create(:project, last_repository_check_at: nil) } # Asc: project2 Desc: project2
- let_it_be(:project5) { create(:project, last_repository_check_at: 20.days.ago) } # Asc: project4 Desc: project4
-
- context 'when ascending' do
- let_it_be(:order) { Gitlab::Pagination::Keyset::Order.build([column_order_last_repo, column_order_id]) }
- let_it_be(:nodes) { Project.order(order) }
- let_it_be(:ascending_nodes) { [project5, project1, project3, project2, project4] }
-
- it_behaves_like 'nodes are in ascending order'
-
- context 'when before cursor value is NULL' do
- let(:arguments) { { before: encoded_cursor(project4) } }
-
- it 'returns all projects before the cursor' do
- expect(subject.sliced_nodes).to eq([project5, project1, project3, project2])
- end
- end
-
- context 'when after cursor value is NULL' do
- let(:arguments) { { after: encoded_cursor(project2) } }
-
- it 'returns all projects after the cursor' do
- expect(subject.sliced_nodes).to eq([project4])
- end
- end
- end
-
- context 'when descending' do
- let_it_be(:order) { Gitlab::Pagination::Keyset::Order.build([column_order_last_repo_desc, column_order_id]) }
- let_it_be(:nodes) { Project.order(order) }
- let_it_be(:descending_nodes) { [project3, project1, project5, project2, project4] }
-
- it_behaves_like 'nodes are in descending order'
-
- context 'when before cursor value is NULL' do
- let(:arguments) { { before: encoded_cursor(project4) } }
-
- it 'returns all projects before the cursor' do
- expect(subject.sliced_nodes).to eq([project3, project1, project5, project2])
- end
- end
-
- context 'when after cursor value is NULL' do
- let(:arguments) { { after: encoded_cursor(project2) } }
-
- it 'returns all projects after the cursor' do
- expect(subject.sliced_nodes).to eq([project4])
- end
- end
- end
- end
-
- context 'when ordering by similarity' do
- let_it_be(:project1) { create(:project, name: 'test') }
- let_it_be(:project2) { create(:project, name: 'testing') }
- let_it_be(:project3) { create(:project, name: 'tests') }
- let_it_be(:project4) { create(:project, name: 'testing stuff') }
- let_it_be(:project5) { create(:project, name: 'test') }
-
- let_it_be(:nodes) do
- # Note: sorted_by_similarity_desc scope internally supports the generic keyset order.
- Project.sorted_by_similarity_desc('test', include_in_select: true)
- end
-
- let_it_be(:descending_nodes) { nodes.to_a }
-
- it_behaves_like 'nodes are in descending order'
- end
-
- context 'when an invalid cursor is provided' do
- let(:arguments) { { before: Base64Bp.urlsafe_encode64('invalidcursor', padding: false) } }
-
- it 'raises an error' do
- expect { subject.sliced_nodes }.to raise_error(Gitlab::Graphql::Errors::ArgumentError)
- end
- end
- end
-
- describe '#nodes' do
- let_it_be(:all_nodes) { create_list(:project, 5) }
-
- let(:paged_nodes) { subject.nodes }
-
- it_behaves_like 'connection with paged nodes' do
- let(:paged_nodes_size) { 3 }
- end
-
- context 'when both are passed' do
- let(:arguments) { { first: 2, last: 2 } }
-
- it 'raises an error' do
- expect { paged_nodes }.to raise_error(Gitlab::Graphql::Errors::ArgumentError)
- end
- end
-
- context 'when primary key is not in original order' do
- let(:nodes) { Project.order(last_repository_check_at: :desc) }
-
- it 'is added to end' do
- sliced = subject.sliced_nodes
-
- order_sql = sliced.order_values.last.to_sql
-
- expect(order_sql).to end_with(Project.arel_table[:id].desc.to_sql)
- end
- end
-
- context 'when there is no primary key' do
- before do
- stub_const('NoPrimaryKey', Class.new(ActiveRecord::Base))
- NoPrimaryKey.class_eval do
- self.table_name = 'no_primary_key'
- self.primary_key = nil
- end
- end
-
- let(:nodes) { NoPrimaryKey.all }
-
- it 'raises an error' do
- expect(NoPrimaryKey.primary_key).to be_nil
- expect { subject.sliced_nodes }.to raise_error(ArgumentError, 'Relation must have a primary key')
- end
- end
- end
-
- describe '#has_previous_page and #has_next_page' do
- # using a list of 5 items with a max_page of 3
- let_it_be(:project_list) { create_list(:project, 5) }
- let_it_be(:nodes) { Project.order(Gitlab::Pagination::Keyset::Order.build([column_order_id])) }
-
- context 'when default query' do
- let(:arguments) { {} }
-
- it 'has no previous, but a next' do
- expect(subject.has_previous_page).to be_falsey
- expect(subject.has_next_page).to be_truthy
- end
- end
-
- context 'when before is first item' do
- let(:arguments) { { before: encoded_cursor(project_list.first) } }
-
- it 'has no previous, but a next' do
- expect(subject.has_previous_page).to be_falsey
- expect(subject.has_next_page).to be_truthy
- end
- end
-
- describe 'using `before`' do
- context 'when before is the last item' do
- let(:arguments) { { before: encoded_cursor(project_list.last) } }
-
- it 'has no previous, but a next' do
- expect(subject.has_previous_page).to be_falsey
- expect(subject.has_next_page).to be_truthy
- end
- end
-
- context 'when before and last specified' do
- let(:arguments) { { before: encoded_cursor(project_list.last), last: 2 } }
-
- it 'has a previous and a next' do
- expect(subject.has_previous_page).to be_truthy
- expect(subject.has_next_page).to be_truthy
- end
- end
-
- context 'when before and last does request all remaining nodes' do
- let(:arguments) { { before: encoded_cursor(project_list[1]), last: 3 } }
-
- it 'has a previous and a next' do
- expect(subject.has_previous_page).to be_falsey
- expect(subject.has_next_page).to be_truthy
- expect(subject.nodes).to eq [project_list[0]]
- end
- end
- end
-
- describe 'using `after`' do
- context 'when after is the first item' do
- let(:arguments) { { after: encoded_cursor(project_list.first) } }
-
- it 'has a previous, and a next' do
- expect(subject.has_previous_page).to be_truthy
- expect(subject.has_next_page).to be_truthy
- end
- end
-
- context 'when after and first specified' do
- let(:arguments) { { after: encoded_cursor(project_list.first), first: 2 } }
-
- it 'has a previous and a next' do
- expect(subject.has_previous_page).to be_truthy
- expect(subject.has_next_page).to be_truthy
- end
- end
-
- context 'when before and last does request all remaining nodes' do
- let(:arguments) { { after: encoded_cursor(project_list[2]), last: 3 } }
-
- it 'has a previous but no next' do
- expect(subject.has_previous_page).to be_truthy
- expect(subject.has_next_page).to be_falsey
- end
- end
- end
- end
- end
-end
diff --git a/spec/lib/gitlab/graphql/pagination/keyset/connection_spec.rb b/spec/lib/gitlab/graphql/pagination/keyset/connection_spec.rb
index 6574b3e3131..b54c618d8e0 100644
--- a/spec/lib/gitlab/graphql/pagination/keyset/connection_spec.rb
+++ b/spec/lib/gitlab/graphql/pagination/keyset/connection_spec.rb
@@ -5,10 +5,38 @@ require 'spec_helper'
RSpec.describe Gitlab::Graphql::Pagination::Keyset::Connection do
include GraphqlHelpers
+ # https://gitlab.com/gitlab-org/gitlab/-/issues/334973
+ # The spec will be merged with connection_spec.rb in the future.
let(:nodes) { Project.all.order(id: :asc) }
let(:arguments) { {} }
let(:context) { GraphQL::Query::Context.new(query: query_double, values: nil, object: nil) }
+ let_it_be(:column_order_id) { Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(attribute_name: 'id', order_expression: Project.arel_table[:id].asc) }
+ let_it_be(:column_order_id_desc) { Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(attribute_name: 'id', order_expression: Project.arel_table[:id].desc) }
+ let_it_be(:column_order_updated_at) { Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(attribute_name: 'updated_at', order_expression: Project.arel_table[:updated_at].asc) }
+ let_it_be(:column_order_created_at) { Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(attribute_name: 'created_at', order_expression: Project.arel_table[:created_at].asc) }
+ let_it_be(:column_order_last_repo) do
+ Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
+ attribute_name: 'last_repository_check_at',
+ column_expression: Project.arel_table[:last_repository_check_at],
+ order_expression: Project.arel_table[:last_repository_check_at].asc.nulls_last,
+ reversed_order_expression: Project.arel_table[:last_repository_check_at].desc.nulls_last,
+ order_direction: :asc,
+ nullable: :nulls_last,
+ distinct: false)
+ end
+
+ let_it_be(:column_order_last_repo_desc) do
+ Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
+ attribute_name: 'last_repository_check_at',
+ column_expression: Project.arel_table[:last_repository_check_at],
+ order_expression: Project.arel_table[:last_repository_check_at].desc.nulls_last,
+ reversed_order_expression: Project.arel_table[:last_repository_check_at].asc.nulls_last,
+ order_direction: :desc,
+ nullable: :nulls_last,
+ distinct: false)
+ end
+
subject(:connection) do
described_class.new(nodes, **{ context: context, max_page_size: 3 }.merge(arguments))
end
@@ -21,414 +49,293 @@ RSpec.describe Gitlab::Graphql::Pagination::Keyset::Connection do
Gitlab::Json.parse(Base64Bp.urlsafe_decode64(cursor))
end
- # see: https://gitlab.com/gitlab-org/gitlab/-/issues/297358
- context 'the relation has been preloaded' do
- let(:projects) { Project.all.preload(:issues) }
- let(:nodes) { projects.first.issues }
-
- before do
- project = create(:project)
- create_list(:issue, 3, project: project)
- end
-
- it 'is loaded' do
- expect(nodes).to be_loaded
- end
-
- it 'does not error when accessing pagination information' do
- connection.first = 2
-
- expect(connection).to have_attributes(
- has_previous_page: false,
- has_next_page: true
- )
- end
-
- it 'can generate cursors' do
- connection.send(:ordered_items) # necessary to generate the order-list
-
- expect(connection.cursor_for(nodes.first)).to be_a(String)
- end
-
- it 'can read the next page' do
- connection.send(:ordered_items) # necessary to generate the order-list
- ordered = nodes.reorder(id: :desc)
- next_page = described_class.new(nodes,
- context: context,
- max_page_size: 3,
- after: connection.cursor_for(ordered.second))
-
- expect(next_page.sliced_nodes).to contain_exactly(ordered.third)
- end
- end
-
- it_behaves_like 'a connection with collection methods'
-
- it_behaves_like 'a redactable connection' do
- let_it_be(:projects) { create_list(:project, 2) }
- let(:unwanted) { projects.second }
- end
-
- describe '#cursor_for' do
- let(:project) { create(:project) }
- let(:cursor) { connection.cursor_for(project) }
-
- it 'returns an encoded ID' do
- expect(decoded_cursor(cursor)).to eq('id' => project.id.to_s)
- end
-
- context 'when SimpleOrderBuilder cannot build keyset paginated query' do
- it 'increments the `old_keyset_pagination_usage` counter', :prometheus do
- expect(Gitlab::Pagination::Keyset::SimpleOrderBuilder).to receive(:build).and_return([false, nil])
-
- decoded_cursor(cursor)
-
- counter = Gitlab::Metrics.registry.get(:old_keyset_pagination_usage)
- expect(counter.get(model: 'Project')).to eq(1)
- end
- end
-
- context 'when an order is specified' do
- let(:nodes) { Project.order(:updated_at) }
+ describe "with generic keyset order support" do
+ let(:nodes) { Project.all.order(Gitlab::Pagination::Keyset::Order.build([column_order_id])) }
- it 'returns the encoded value of the order' do
- expect(decoded_cursor(cursor)).to include('updated_at' => project.updated_at.to_s(:inspect))
- end
-
- it 'includes the :id even when not specified in the order' do
- expect(decoded_cursor(cursor)).to include('id' => project.id.to_s)
- end
- end
+ it_behaves_like 'a connection with collection methods'
- context 'when multiple orders are specified' do
- let(:nodes) { Project.order(:updated_at).order(:created_at) }
-
- it 'returns the encoded value of the order' do
- expect(decoded_cursor(cursor)).to include('updated_at' => project.updated_at.to_s(:inspect))
- end
+ it_behaves_like 'a redactable connection' do
+ let_it_be(:projects) { create_list(:project, 2) }
+ let(:unwanted) { projects.second }
end
- context 'when multiple orders with SQL are specified' do
- let(:nodes) { Project.order(Arel.sql('projects.updated_at IS NULL')).order(:updated_at).order(:id) }
+ describe '#cursor_for' do
+ let(:project) { create(:project) }
+ let(:cursor) { connection.cursor_for(project) }
- it 'returns the encoded value of the order' do
- expect(decoded_cursor(cursor)).to include('updated_at' => project.updated_at.to_s(:inspect))
+ it 'returns an encoded ID' do
+ expect(decoded_cursor(cursor)).to eq('id' => project.id.to_s)
end
- end
- end
- describe '#sliced_nodes' do
- let(:projects) { create_list(:project, 4) }
+ context 'when an order is specified' do
+ let(:nodes) { Project.all.order(Gitlab::Pagination::Keyset::Order.build([column_order_id])) }
- context 'when before is passed' do
- let(:arguments) { { before: encoded_cursor(projects[1]) } }
-
- it 'only returns the project before the selected one' do
- expect(subject.sliced_nodes).to contain_exactly(projects.first)
+ it 'returns the encoded value of the order' do
+ expect(decoded_cursor(cursor)).to include('id' => project.id.to_s)
+ end
end
- context 'when the sort order is descending' do
- let(:nodes) { Project.all.order(id: :desc) }
+ context 'when multiple orders are specified' do
+ let(:nodes) { Project.all.order(Gitlab::Pagination::Keyset::Order.build([column_order_updated_at, column_order_created_at, column_order_id])) }
- it 'returns the correct nodes' do
- expect(subject.sliced_nodes).to contain_exactly(*projects[2..])
+ it 'returns the encoded value of the order' do
+ expect(decoded_cursor(cursor)).to include('updated_at' => project.updated_at.to_s(:inspect))
end
end
end
- context 'when after is passed' do
- let(:arguments) { { after: encoded_cursor(projects[1]) } }
+ describe '#sliced_nodes' do
+ let(:projects) { create_list(:project, 4) }
- it 'only returns the project before the selected one' do
- expect(subject.sliced_nodes).to contain_exactly(*projects[2..])
- end
+ context 'when before is passed' do
+ let(:arguments) { { before: encoded_cursor(projects[1]) } }
- context 'when the sort order is descending' do
- let(:nodes) { Project.all.order(id: :desc) }
-
- it 'returns the correct nodes' do
+ it 'only returns the project before the selected one' do
expect(subject.sliced_nodes).to contain_exactly(projects.first)
end
- end
- end
- context 'when both before and after are passed' do
- let(:arguments) do
- {
- after: encoded_cursor(projects[1]),
- before: encoded_cursor(projects[3])
- }
- end
+ context 'when the sort order is descending' do
+ let(:nodes) { Project.all.order(Gitlab::Pagination::Keyset::Order.build([column_order_id_desc])) }
- it 'returns the expected set' do
- expect(subject.sliced_nodes).to contain_exactly(projects[2])
+ it 'returns the correct nodes' do
+ expect(subject.sliced_nodes).to contain_exactly(*projects[2..])
+ end
+ end
end
- end
- shared_examples 'nodes are in ascending order' do
- context 'when no cursor is passed' do
- let(:arguments) { {} }
+ context 'when after is passed' do
+ let(:arguments) { { after: encoded_cursor(projects[1]) } }
- it 'returns projects in ascending order' do
- expect(subject.sliced_nodes).to eq(ascending_nodes)
+ it 'only returns the project before the selected one' do
+ expect(subject.sliced_nodes).to contain_exactly(*projects[2..])
end
- end
- context 'when before cursor value is not NULL' do
- let(:arguments) { { before: encoded_cursor(ascending_nodes[2]) } }
+ context 'when the sort order is descending' do
+ let(:nodes) { Project.all.order(Gitlab::Pagination::Keyset::Order.build([column_order_id_desc])) }
- it 'returns all projects before the cursor' do
- expect(subject.sliced_nodes).to eq(ascending_nodes.first(2))
+ it 'returns the correct nodes' do
+ expect(subject.sliced_nodes).to contain_exactly(projects.first)
+ end
end
end
- context 'when after cursor value is not NULL' do
- let(:arguments) { { after: encoded_cursor(ascending_nodes[1]) } }
+ context 'when both before and after are passed' do
+ let(:arguments) do
+ {
+ after: encoded_cursor(projects[1]),
+ before: encoded_cursor(projects[3])
+ }
+ end
- it 'returns all projects after the cursor' do
- expect(subject.sliced_nodes).to eq(ascending_nodes.last(3))
+ it 'returns the expected set' do
+ expect(subject.sliced_nodes).to contain_exactly(projects[2])
end
end
- context 'when before and after cursor' do
- let(:arguments) { { before: encoded_cursor(ascending_nodes.last), after: encoded_cursor(ascending_nodes.first) } }
+ shared_examples 'nodes are in ascending order' do
+ context 'when no cursor is passed' do
+ let(:arguments) { {} }
- it 'returns all projects after the cursor' do
- expect(subject.sliced_nodes).to eq(ascending_nodes[1..3])
+ it 'returns projects in ascending order' do
+ expect(subject.sliced_nodes).to eq(ascending_nodes)
+ end
end
- end
- end
- shared_examples 'nodes are in descending order' do
- context 'when no cursor is passed' do
- let(:arguments) { {} }
+ context 'when before cursor value is not NULL' do
+ let(:arguments) { { before: encoded_cursor(ascending_nodes[2]) } }
- it 'only returns projects in descending order' do
- expect(subject.sliced_nodes).to eq(descending_nodes)
+ it 'returns all projects before the cursor' do
+ expect(subject.sliced_nodes).to eq(ascending_nodes.first(2))
+ end
end
- end
- context 'when before cursor value is not NULL' do
- let(:arguments) { { before: encoded_cursor(descending_nodes[2]) } }
+ context 'when after cursor value is not NULL' do
+ let(:arguments) { { after: encoded_cursor(ascending_nodes[1]) } }
- it 'returns all projects before the cursor' do
- expect(subject.sliced_nodes).to eq(descending_nodes.first(2))
+ it 'returns all projects after the cursor' do
+ expect(subject.sliced_nodes).to eq(ascending_nodes.last(3))
+ end
end
- end
- context 'when after cursor value is not NULL' do
- let(:arguments) { { after: encoded_cursor(descending_nodes[1]) } }
+ context 'when before and after cursor' do
+ let(:arguments) { { before: encoded_cursor(ascending_nodes.last), after: encoded_cursor(ascending_nodes.first) } }
- it 'returns all projects after the cursor' do
- expect(subject.sliced_nodes).to eq(descending_nodes.last(3))
+ it 'returns all projects after the cursor' do
+ expect(subject.sliced_nodes).to eq(ascending_nodes[1..3])
+ end
end
end
- context 'when before and after cursor' do
- let(:arguments) { { before: encoded_cursor(descending_nodes.last), after: encoded_cursor(descending_nodes.first) } }
+ shared_examples 'nodes are in descending order' do
+ context 'when no cursor is passed' do
+ let(:arguments) { {} }
- it 'returns all projects after the cursor' do
- expect(subject.sliced_nodes).to eq(descending_nodes[1..3])
+ it 'only returns projects in descending order' do
+ expect(subject.sliced_nodes).to eq(descending_nodes)
+ end
end
- end
- end
- context 'when ordering uses LOWER' do
- let!(:project1) { create(:project, name: 'A') } # Asc: project1 Desc: project4
- let!(:project2) { create(:project, name: 'c') } # Asc: project5 Desc: project2
- let!(:project3) { create(:project, name: 'b') } # Asc: project3 Desc: project3
- let!(:project4) { create(:project, name: 'd') } # Asc: project2 Desc: project5
- let!(:project5) { create(:project, name: 'a') } # Asc: project4 Desc: project1
+ context 'when before cursor value is not NULL' do
+ let(:arguments) { { before: encoded_cursor(descending_nodes[2]) } }
- context 'when ascending' do
- let(:nodes) do
- Project.order(Arel::Table.new(:projects)['name'].lower.asc).order(id: :asc)
+ it 'returns all projects before the cursor' do
+ expect(subject.sliced_nodes).to eq(descending_nodes.first(2))
+ end
end
- let(:ascending_nodes) { [project1, project5, project3, project2, project4] }
+ context 'when after cursor value is not NULL' do
+ let(:arguments) { { after: encoded_cursor(descending_nodes[1]) } }
- it_behaves_like 'nodes are in ascending order'
- end
-
- context 'when descending' do
- let(:nodes) do
- Project.order(Arel::Table.new(:projects)['name'].lower.desc).order(id: :desc)
+ it 'returns all projects after the cursor' do
+ expect(subject.sliced_nodes).to eq(descending_nodes.last(3))
+ end
end
- let(:descending_nodes) { [project4, project2, project3, project5, project1] }
+ context 'when before and after cursor' do
+ let(:arguments) { { before: encoded_cursor(descending_nodes.last), after: encoded_cursor(descending_nodes.first) } }
- it_behaves_like 'nodes are in descending order'
+ it 'returns all projects after the cursor' do
+ expect(subject.sliced_nodes).to eq(descending_nodes[1..3])
+ end
+ end
end
- end
- context 'NULLS order' do
- using RSpec::Parameterized::TableSyntax
+ context 'when multiple orders with nil values are defined' do
+ let_it_be(:project1) { create(:project, last_repository_check_at: 10.days.ago) } # Asc: project5 Desc: project3
+ let_it_be(:project2) { create(:project, last_repository_check_at: nil) } # Asc: project1 Desc: project1
+ let_it_be(:project3) { create(:project, last_repository_check_at: 5.days.ago) } # Asc: project3 Desc: project5
+ let_it_be(:project4) { create(:project, last_repository_check_at: nil) } # Asc: project2 Desc: project2
+ let_it_be(:project5) { create(:project, last_repository_check_at: 20.days.ago) } # Asc: project4 Desc: project4
- let_it_be(:issue1) { create(:issue, relative_position: nil) }
- let_it_be(:issue2) { create(:issue, relative_position: 100) }
- let_it_be(:issue3) { create(:issue, relative_position: 200) }
- let_it_be(:issue4) { create(:issue, relative_position: nil) }
- let_it_be(:issue5) { create(:issue, relative_position: 300) }
+ context 'when ascending' do
+ let_it_be(:order) { Gitlab::Pagination::Keyset::Order.build([column_order_last_repo, column_order_id]) }
+ let_it_be(:nodes) { Project.order(order) }
+ let_it_be(:ascending_nodes) { [project5, project1, project3, project2, project4] }
- context 'when ascending NULLS LAST (ties broken by id DESC implicitly)' do
- let(:ascending_nodes) { [issue2, issue3, issue5, issue4, issue1] }
+ it_behaves_like 'nodes are in ascending order'
- where(:nodes) do
- [
- lazy { Issue.order(Issue.arel_table[:relative_position].asc.nulls_last) }
- ]
- end
+ context 'when before cursor value is NULL' do
+ let(:arguments) { { before: encoded_cursor(project4) } }
- with_them do
- it_behaves_like 'nodes are in ascending order'
- end
- end
+ it 'returns all projects before the cursor' do
+ expect(subject.sliced_nodes).to eq([project5, project1, project3, project2])
+ end
+ end
- context 'when descending NULLS LAST (ties broken by id DESC implicitly)' do
- let(:descending_nodes) { [issue5, issue3, issue2, issue4, issue1] }
+ context 'when after cursor value is NULL' do
+ let(:arguments) { { after: encoded_cursor(project2) } }
- where(:nodes) do
- [
- lazy { Issue.order(Issue.arel_table[:relative_position].desc.nulls_last) }
-]
+ it 'returns all projects after the cursor' do
+ expect(subject.sliced_nodes).to eq([project4])
+ end
+ end
end
- with_them do
+ context 'when descending' do
+ let_it_be(:order) { Gitlab::Pagination::Keyset::Order.build([column_order_last_repo_desc, column_order_id]) }
+ let_it_be(:nodes) { Project.order(order) }
+ let_it_be(:descending_nodes) { [project3, project1, project5, project2, project4] }
+
it_behaves_like 'nodes are in descending order'
- end
- end
- context 'when ascending NULLS FIRST with a tie breaker' do
- let(:ascending_nodes) { [issue1, issue4, issue2, issue3, issue5] }
+ context 'when before cursor value is NULL' do
+ let(:arguments) { { before: encoded_cursor(project4) } }
- where(:nodes) do
- [
- lazy { Issue.order(Issue.arel_table[:relative_position].asc.nulls_first).order(id: :asc) }
-]
- end
+ it 'returns all projects before the cursor' do
+ expect(subject.sliced_nodes).to eq([project3, project1, project5, project2])
+ end
+ end
- with_them do
- it_behaves_like 'nodes are in ascending order'
+ context 'when after cursor value is NULL' do
+ let(:arguments) { { after: encoded_cursor(project2) } }
+
+ it 'returns all projects after the cursor' do
+ expect(subject.sliced_nodes).to eq([project4])
+ end
+ end
end
end
- context 'when descending NULLS FIRST with a tie breaker' do
- let(:descending_nodes) { [issue1, issue4, issue5, issue3, issue2] }
+ context 'when ordering by similarity' do
+ let_it_be(:project1) { create(:project, name: 'test') }
+ let_it_be(:project2) { create(:project, name: 'testing') }
+ let_it_be(:project3) { create(:project, name: 'tests') }
+ let_it_be(:project4) { create(:project, name: 'testing stuff') }
+ let_it_be(:project5) { create(:project, name: 'test') }
- where(:nodes) do
- [
- lazy { Issue.order(Issue.arel_table[:relative_position].desc.nulls_first).order(id: :asc) }
-]
+ let_it_be(:nodes) do
+ # Note: sorted_by_similarity_desc scope internally supports the generic keyset order.
+ Project.sorted_by_similarity_desc('test', include_in_select: true)
end
- with_them do
- it_behaves_like 'nodes are in descending order'
- end
- end
- end
+ let_it_be(:descending_nodes) { nodes.to_a }
- context 'when ordering by similarity' do
- let!(:project1) { create(:project, name: 'test') }
- let!(:project2) { create(:project, name: 'testing') }
- let!(:project3) { create(:project, name: 'tests') }
- let!(:project4) { create(:project, name: 'testing stuff') }
- let!(:project5) { create(:project, name: 'test') }
-
- let(:nodes) do
- Project.sorted_by_similarity_desc('test', include_in_select: true)
+ it_behaves_like 'nodes are in descending order'
end
- let(:descending_nodes) { nodes.to_a }
-
- it_behaves_like 'nodes are in descending order'
- end
+ context 'when an invalid cursor is provided' do
+ let(:arguments) { { before: Base64Bp.urlsafe_encode64('invalidcursor', padding: false) } }
- context 'when an invalid cursor is provided' do
- let(:arguments) { { before: Base64Bp.urlsafe_encode64('invalidcursor', padding: false) } }
-
- it 'raises an error' do
- expect { subject.sliced_nodes }.to raise_error(Gitlab::Graphql::Errors::ArgumentError)
+ it 'raises an error' do
+ expect { subject.sliced_nodes }.to raise_error(Gitlab::Graphql::Errors::ArgumentError)
+ end
end
end
- end
- describe '#nodes' do
- let_it_be(:all_nodes) { create_list(:project, 5) }
+ describe '#nodes' do
+ let_it_be(:all_nodes) { create_list(:project, 5) }
- let(:paged_nodes) { subject.nodes }
+ let(:paged_nodes) { subject.nodes }
- it_behaves_like 'connection with paged nodes' do
- let(:paged_nodes_size) { 3 }
- end
-
- context 'when both are passed' do
- let(:arguments) { { first: 2, last: 2 } }
-
- it 'raises an error' do
- expect { paged_nodes }.to raise_error(Gitlab::Graphql::Errors::ArgumentError)
+ it_behaves_like 'connection with paged nodes' do
+ let(:paged_nodes_size) { 3 }
end
- end
- context 'when primary key is not in original order' do
- let(:nodes) { Project.order(last_repository_check_at: :desc) }
+ context 'when both are passed' do
+ let(:arguments) { { first: 2, last: 2 } }
- before do
- stub_feature_flags(new_graphql_keyset_pagination: false)
+ it 'raises an error' do
+ expect { paged_nodes }.to raise_error(Gitlab::Graphql::Errors::ArgumentError)
+ end
end
- it 'is added to end' do
- sliced = subject.sliced_nodes
+ context 'when primary key is not in original order' do
+ let(:nodes) { Project.order(last_repository_check_at: :desc) }
- order_sql = sliced.order_values.last.to_sql
+ it 'is added to end' do
+ sliced = subject.sliced_nodes
- expect(order_sql).to end_with(Project.arel_table[:id].desc.to_sql)
- end
- end
+ order_sql = sliced.order_values.last.to_sql
- context 'when there is no primary key' do
- before do
- stub_const('NoPrimaryKey', Class.new(ActiveRecord::Base))
- NoPrimaryKey.class_eval do
- self.table_name = 'no_primary_key'
- self.primary_key = nil
+ expect(order_sql).to end_with(Project.arel_table[:id].desc.to_sql)
end
end
- let(:nodes) { NoPrimaryKey.all }
-
- it 'raises an error' do
- expect(NoPrimaryKey.primary_key).to be_nil
- expect { subject.sliced_nodes }.to raise_error(ArgumentError, 'Relation must have a primary key')
- end
- end
- end
-
- describe '#has_previous_page and #has_next_page' do
- # using a list of 5 items with a max_page of 3
- let_it_be(:project_list) { create_list(:project, 5) }
- let_it_be(:nodes) { Project.order(:id) }
+ context 'when there is no primary key' do
+ before do
+ stub_const('NoPrimaryKey', Class.new(ActiveRecord::Base))
+ NoPrimaryKey.class_eval do
+ self.table_name = 'no_primary_key'
+ self.primary_key = nil
+ end
+ end
- context 'when default query' do
- let(:arguments) { {} }
+ let(:nodes) { NoPrimaryKey.all }
- it 'has no previous, but a next' do
- expect(subject.has_previous_page).to be_falsey
- expect(subject.has_next_page).to be_truthy
+ it 'raises an error' do
+ expect(NoPrimaryKey.primary_key).to be_nil
+ expect { subject.sliced_nodes }.to raise_error(ArgumentError, 'Relation must have a primary key')
+ end
end
end
- context 'when before is first item' do
- let(:arguments) { { before: encoded_cursor(project_list.first) } }
+ describe '#has_previous_page and #has_next_page' do
+ # using a list of 5 items with a max_page of 3
+ let_it_be(:project_list) { create_list(:project, 5) }
+ let_it_be(:nodes) { Project.order(Gitlab::Pagination::Keyset::Order.build([column_order_id])) }
- it 'has no previous, but a next' do
- expect(subject.has_previous_page).to be_falsey
- expect(subject.has_next_page).to be_truthy
- end
- end
-
- describe 'using `before`' do
- context 'when before is the last item' do
- let(:arguments) { { before: encoded_cursor(project_list.last) } }
+ context 'when default query' do
+ let(:arguments) { {} }
it 'has no previous, but a next' do
expect(subject.has_previous_page).to be_falsey
@@ -436,51 +343,71 @@ RSpec.describe Gitlab::Graphql::Pagination::Keyset::Connection do
end
end
- context 'when before and last specified' do
- let(:arguments) { { before: encoded_cursor(project_list.last), last: 2 } }
+ context 'when before is first item' do
+ let(:arguments) { { before: encoded_cursor(project_list.first) } }
- it 'has a previous and a next' do
- expect(subject.has_previous_page).to be_truthy
+ it 'has no previous, but a next' do
+ expect(subject.has_previous_page).to be_falsey
expect(subject.has_next_page).to be_truthy
end
end
- context 'when before and last does request all remaining nodes' do
- let(:arguments) { { before: encoded_cursor(project_list[1]), last: 3 } }
+ describe 'using `before`' do
+ context 'when before is the last item' do
+ let(:arguments) { { before: encoded_cursor(project_list.last) } }
- it 'has a previous and a next' do
- expect(subject.has_previous_page).to be_falsey
- expect(subject.has_next_page).to be_truthy
- expect(subject.nodes).to eq [project_list[0]]
+ it 'has no previous, but a next' do
+ expect(subject.has_previous_page).to be_falsey
+ expect(subject.has_next_page).to be_truthy
+ end
end
- end
- end
- describe 'using `after`' do
- context 'when after is the first item' do
- let(:arguments) { { after: encoded_cursor(project_list.first) } }
+ context 'when before and last specified' do
+ let(:arguments) { { before: encoded_cursor(project_list.last), last: 2 } }
- it 'has a previous, and a next' do
- expect(subject.has_previous_page).to be_truthy
- expect(subject.has_next_page).to be_truthy
+ it 'has a previous and a next' do
+ expect(subject.has_previous_page).to be_truthy
+ expect(subject.has_next_page).to be_truthy
+ end
end
- end
- context 'when after and first specified' do
- let(:arguments) { { after: encoded_cursor(project_list.first), first: 2 } }
+ context 'when before and last does request all remaining nodes' do
+ let(:arguments) { { before: encoded_cursor(project_list[1]), last: 3 } }
- it 'has a previous and a next' do
- expect(subject.has_previous_page).to be_truthy
- expect(subject.has_next_page).to be_truthy
+ it 'has a previous and a next' do
+ expect(subject.has_previous_page).to be_falsey
+ expect(subject.has_next_page).to be_truthy
+ expect(subject.nodes).to eq [project_list[0]]
+ end
end
end
- context 'when before and last does request all remaining nodes' do
- let(:arguments) { { after: encoded_cursor(project_list[2]), last: 3 } }
+ describe 'using `after`' do
+ context 'when after is the first item' do
+ let(:arguments) { { after: encoded_cursor(project_list.first) } }
+
+ it 'has a previous, and a next' do
+ expect(subject.has_previous_page).to be_truthy
+ expect(subject.has_next_page).to be_truthy
+ end
+ end
+
+ context 'when after and first specified' do
+ let(:arguments) { { after: encoded_cursor(project_list.first), first: 2 } }
+
+ it 'has a previous and a next' do
+ expect(subject.has_previous_page).to be_truthy
+ expect(subject.has_next_page).to be_truthy
+ end
+ end
+
+ context 'when before and last does request all remaining nodes' do
+ let(:arguments) { { after: encoded_cursor(project_list[2]), last: 3 } }
- it 'has a previous but no next' do
- expect(subject.has_previous_page).to be_truthy
- expect(subject.has_next_page).to be_falsey
+ it 'has a previous but no next' do
+ expect(subject.has_previous_page).to be_truthy
+ expect(subject.has_next_page).to be_falsey
+ end
end
end
end
diff --git a/spec/lib/gitlab/graphql/pagination/keyset/order_info_spec.rb b/spec/lib/gitlab/graphql/pagination/keyset/order_info_spec.rb
deleted file mode 100644
index 40ee47ece49..00000000000
--- a/spec/lib/gitlab/graphql/pagination/keyset/order_info_spec.rb
+++ /dev/null
@@ -1,118 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::Graphql::Pagination::Keyset::OrderInfo do
- describe '#build_order_list' do
- let(:order_list) { described_class.build_order_list(relation) }
-
- context 'when multiple orders with SQL is specified' do
- let(:relation) { Project.order(Arel.sql('projects.updated_at IS NULL')).order(:updated_at).order(:id) }
-
- it 'ignores the SQL order' do
- expect(order_list.count).to eq 2
- expect(order_list.first.attribute_name).to eq 'updated_at'
- expect(order_list.first.operator_for(:after)).to eq '>'
- expect(order_list.last.attribute_name).to eq 'id'
- expect(order_list.last.operator_for(:after)).to eq '>'
- end
- end
-
- context 'when order contains NULLS LAST' do
- let(:relation) { Project.order(Arel.sql('projects.updated_at Asc Nulls Last')).order(:id) }
-
- it 'does not ignore the SQL order' do
- expect(order_list.count).to eq 2
- expect(order_list.first.attribute_name).to eq 'projects.updated_at'
- expect(order_list.first.operator_for(:after)).to eq '>'
- expect(order_list.last.attribute_name).to eq 'id'
- expect(order_list.last.operator_for(:after)).to eq '>'
- end
- end
-
- context 'when order contains invalid formatted NULLS LAST ' do
- let(:relation) { Project.order(Arel.sql('projects.updated_at created_at Asc Nulls Last')).order(:id) }
-
- it 'ignores the SQL order' do
- expect(order_list.count).to eq 1
- end
- end
-
- context 'when order contains LOWER' do
- let(:relation) { Project.order(Arel::Table.new(:projects)['name'].lower.asc).order(:id) }
-
- it 'does not ignore the SQL order' do
- expect(order_list.count).to eq 2
- expect(order_list.first.attribute_name).to eq 'name'
- expect(order_list.first.named_function).to be_kind_of(Arel::Nodes::NamedFunction)
- expect(order_list.first.named_function.to_sql).to eq 'LOWER("projects"."name")'
- expect(order_list.first.operator_for(:after)).to eq '>'
- expect(order_list.last.attribute_name).to eq 'id'
- expect(order_list.last.operator_for(:after)).to eq '>'
- end
- end
-
- context 'when ordering by CASE', :aggregate_failuers do
- let(:relation) { Project.order(Arel::Nodes::Case.new(Project.arel_table[:pending_delete]).when(true).then(100).else(1000).asc) }
-
- it 'assigns the right attribute name, named function, and direction' do
- expect(order_list.count).to eq 1
- expect(order_list.first.attribute_name).to eq 'case_order_value'
- expect(order_list.first.named_function).to be_kind_of(Arel::Nodes::Case)
- expect(order_list.first.sort_direction).to eq :asc
- end
- end
-
- context 'when ordering by ARRAY_POSITION', :aggregate_failuers do
- let(:array_position) { Arel::Nodes::NamedFunction.new('ARRAY_POSITION', [Arel.sql("ARRAY[1,0]::smallint[]"), Project.arel_table[:auto_cancel_pending_pipelines]]) }
- let(:relation) { Project.order(array_position.asc) }
-
- it 'assigns the right attribute name, named function, and direction' do
- expect(order_list.count).to eq 1
- expect(order_list.first.attribute_name).to eq 'array_position'
- expect(order_list.first.named_function).to be_kind_of(Arel::Nodes::NamedFunction)
- expect(order_list.first.sort_direction).to eq :asc
- end
- end
- end
-
- describe '#validate_ordering' do
- let(:order_list) { described_class.build_order_list(relation) }
-
- context 'when number of ordering fields is 0' do
- let(:relation) { Project.all }
-
- it 'raises an error' do
- expect { described_class.validate_ordering(relation, order_list) }
- .to raise_error(ArgumentError, 'A minimum of 1 ordering field is required')
- end
- end
-
- context 'when number of ordering fields is over 2' do
- let(:relation) { Project.order(last_repository_check_at: :desc).order(updated_at: :desc).order(:id) }
-
- it 'raises an error' do
- expect { described_class.validate_ordering(relation, order_list) }
- .to raise_error(ArgumentError, 'A maximum of 2 ordering fields are allowed')
- end
- end
-
- context 'when the second (or first) column is nullable' do
- let(:relation) { Project.order(last_repository_check_at: :desc).order(updated_at: :desc) }
-
- it 'raises an error' do
- expect { described_class.validate_ordering(relation, order_list) }
- .to raise_error(ArgumentError, "Column `updated_at` must not allow NULL")
- end
- end
-
- context 'for last ordering field' do
- let(:relation) { Project.order(namespace_id: :desc) }
-
- it 'raises error if primary key is not last field' do
- expect { described_class.validate_ordering(relation, order_list) }
- .to raise_error(ArgumentError, "Last ordering field must be the primary key, `#{relation.primary_key}`")
- end
- end
- end
-end
diff --git a/spec/lib/gitlab/graphql/pagination/keyset/query_builder_spec.rb b/spec/lib/gitlab/graphql/pagination/keyset/query_builder_spec.rb
deleted file mode 100644
index 31c02fd43e8..00000000000
--- a/spec/lib/gitlab/graphql/pagination/keyset/query_builder_spec.rb
+++ /dev/null
@@ -1,135 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::Graphql::Pagination::Keyset::QueryBuilder do
- context 'when number of ordering fields is 0' do
- it 'raises an error' do
- expect { described_class.new(Issue.arel_table, [], {}, :after) }
- .to raise_error(ArgumentError, 'No ordering scopes have been supplied')
- end
- end
-
- describe '#conditions' do
- let(:relation) { Issue.order(relative_position: :desc).order(:id) }
- let(:order_list) { Gitlab::Graphql::Pagination::Keyset::OrderInfo.build_order_list(relation) }
- let(:arel_table) { Issue.arel_table }
- let(:builder) { described_class.new(arel_table, order_list, decoded_cursor, before_or_after) }
- let(:before_or_after) { :after }
-
- context 'when only a single ordering' do
- let(:relation) { Issue.order(id: :desc) }
-
- context 'when the value is nil' do
- let(:decoded_cursor) { { 'id' => nil } }
-
- it 'raises an error' do
- expect { builder.conditions }
- .to raise_error(Gitlab::Graphql::Errors::ArgumentError, 'Before/after cursor invalid: `nil` was provided as only sortable value')
- end
- end
-
- context 'when value is not nil' do
- let(:decoded_cursor) { { 'id' => 100 } }
- let(:conditions) { builder.conditions }
-
- context 'when :after' do
- it 'generates the correct condition' do
- expect(conditions.strip).to eq '("issues"."id" < 100)'
- end
- end
-
- context 'when :before' do
- let(:before_or_after) { :before }
-
- it 'generates the correct condition' do
- expect(conditions.strip).to eq '("issues"."id" > 100)'
- end
- end
- end
- end
-
- context 'when two orderings' do
- let(:decoded_cursor) { { 'relative_position' => 1500, 'id' => 100 } }
-
- context 'when no values are nil' do
- context 'when :after' do
- it 'generates the correct condition' do
- conditions = builder.conditions
-
- expect(conditions).to include '"issues"."relative_position" < 1500'
- expect(conditions).to include '"issues"."id" > 100'
- expect(conditions).to include 'OR ("issues"."relative_position" IS NULL)'
- end
- end
-
- context 'when :before' do
- let(:before_or_after) { :before }
-
- it 'generates the correct condition' do
- conditions = builder.conditions
-
- expect(conditions).to include '("issues"."relative_position" > 1500)'
- expect(conditions).to include '"issues"."id" < 100'
- expect(conditions).to include '"issues"."relative_position" = 1500'
- end
- end
- end
-
- context 'when first value is nil' do
- let(:decoded_cursor) { { 'relative_position' => nil, 'id' => 100 } }
-
- context 'when :after' do
- it 'generates the correct condition' do
- conditions = builder.conditions
-
- expect(conditions).to include '"issues"."relative_position" IS NULL'
- expect(conditions).to include '"issues"."id" > 100'
- end
- end
-
- context 'when :before' do
- let(:before_or_after) { :before }
-
- it 'generates the correct condition' do
- conditions = builder.conditions
-
- expect(conditions).to include '"issues"."relative_position" IS NULL'
- expect(conditions).to include '"issues"."id" < 100'
- expect(conditions).to include 'OR ("issues"."relative_position" IS NOT NULL)'
- end
- end
- end
- end
-
- context 'when sorting using LOWER' do
- let(:relation) { Project.order(Arel::Table.new(:projects)['name'].lower.asc).order(:id) }
- let(:arel_table) { Project.arel_table }
- let(:decoded_cursor) { { 'name' => 'Test', 'id' => 100 } }
-
- context 'when no values are nil' do
- context 'when :after' do
- it 'generates the correct condition' do
- conditions = builder.conditions
-
- expect(conditions).to include '(LOWER("projects"."name") > \'test\')'
- expect(conditions).to include '"projects"."id" > 100'
- expect(conditions).to include 'OR (LOWER("projects"."name") IS NULL)'
- end
- end
-
- context 'when :before' do
- let(:before_or_after) { :before }
-
- it 'generates the correct condition' do
- conditions = builder.conditions
-
- expect(conditions).to include '(LOWER("projects"."name") < \'test\')'
- expect(conditions).to include '"projects"."id" < 100'
- expect(conditions).to include 'LOWER("projects"."name") = \'test\''
- end
- end
- end
- end
- end
-end
diff --git a/spec/lib/gitlab/graphql/type_name_deprecations_spec.rb b/spec/lib/gitlab/graphql/type_name_deprecations_spec.rb
new file mode 100644
index 00000000000..0505e709a3b
--- /dev/null
+++ b/spec/lib/gitlab/graphql/type_name_deprecations_spec.rb
@@ -0,0 +1,52 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+require_relative '../../../support/helpers/type_name_deprecation_helpers'
+
+RSpec.describe Gitlab::Graphql::TypeNameDeprecations do
+ include TypeNameDeprecationHelpers
+
+ let(:deprecation_1) do
+ described_class::NameDeprecation.new(old_name: 'Foo::Model', new_name: 'Bar', milestone: '9.0')
+ end
+
+ let(:deprecation_2) do
+ described_class::NameDeprecation.new(old_name: 'Baz', new_name: 'Qux::Model', milestone: '10.0')
+ end
+
+ before do
+ stub_type_name_deprecations(deprecation_1, deprecation_2)
+ end
+
+ describe '.deprecated?' do
+ it 'returns a boolean to signal if the model name has a deprecation', :aggregate_failures do
+ expect(described_class.deprecated?('Foo::Model')).to eq(true)
+ expect(described_class.deprecated?('Qux::Model')).to eq(false)
+ end
+ end
+
+ describe '.deprecation_for' do
+ it 'returns the deprecation for the model if it exists', :aggregate_failures do
+ expect(described_class.deprecation_for('Foo::Model')).to eq(deprecation_1)
+ expect(described_class.deprecation_for('Qux::Model')).to be_nil
+ end
+ end
+
+ describe '.deprecation_by' do
+ it 'returns the deprecation by the model if it exists', :aggregate_failures do
+ expect(described_class.deprecation_by('Foo::Model')).to be_nil
+ expect(described_class.deprecation_by('Qux::Model')).to eq(deprecation_2)
+ end
+ end
+
+ describe '.apply_to_graphql_name' do
+ it 'returns the corresponding graphql_name of the GID for the new model', :aggregate_failures do
+ expect(described_class.apply_to_graphql_name('Foo::Model')).to eq('Bar')
+ expect(described_class.apply_to_graphql_name('Baz')).to eq('Qux::Model')
+ end
+
+ it 'returns the same value if there is no deprecation' do
+ expect(described_class.apply_to_graphql_name('Project')).to eq('Project')
+ end
+ end
+end
diff --git a/spec/lib/gitlab/graphs/commits_spec.rb b/spec/lib/gitlab/graphs/commits_spec.rb
index 79cec2d8705..c3c696ceedc 100644
--- a/spec/lib/gitlab/graphs/commits_spec.rb
+++ b/spec/lib/gitlab/graphs/commits_spec.rb
@@ -6,7 +6,7 @@ RSpec.describe Gitlab::Graphs::Commits do
let!(:project) { create(:project, :public) }
let!(:commit1) { create(:commit, git_commit: RepoHelpers.sample_commit, project: project, committed_date: Time.now) }
- let!(:commit1_yesterday) { create(:commit, git_commit: RepoHelpers.sample_commit, project: project, committed_date: 1.day.ago)}
+ let!(:commit1_yesterday) { create(:commit, git_commit: RepoHelpers.sample_commit, project: project, committed_date: 1.day.ago) }
let!(:commit2) { create(:commit, git_commit: RepoHelpers.another_sample_commit, project: project, committed_date: Time.now) }
diff --git a/spec/lib/gitlab/highlight_spec.rb b/spec/lib/gitlab/highlight_spec.rb
index 537e59d91c3..d7ae6ed06a4 100644
--- a/spec/lib/gitlab/highlight_spec.rb
+++ b/spec/lib/gitlab/highlight_spec.rb
@@ -71,7 +71,7 @@ RSpec.describe Gitlab::Highlight do
context 'diff highlighting' do
let(:file_name) { 'test.diff' }
- let(:content) { "+aaa\n+bbb\n- ccc\n ddd\n"}
+ let(:content) { "+aaa\n+bbb\n- ccc\n ddd\n" }
let(:expected) do
%q(<span id="LC1" class="line" lang="diff"><span class="gi">+aaa</span></span>
<span id="LC2" class="line" lang="diff"><span class="gi">+bbb</span></span>
diff --git a/spec/lib/gitlab/hook_data/group_builder_spec.rb b/spec/lib/gitlab/hook_data/group_builder_spec.rb
index d7347ff99d4..4e6152390a4 100644
--- a/spec/lib/gitlab/hook_data/group_builder_spec.rb
+++ b/spec/lib/gitlab/hook_data/group_builder_spec.rb
@@ -38,6 +38,7 @@ RSpec.describe Gitlab::HookData::GroupBuilder do
let(:event) { :create }
it { expect(event_name).to eq('group_create') }
+
it_behaves_like 'includes the required attributes'
it_behaves_like 'does not include old path attributes'
end
@@ -46,6 +47,7 @@ RSpec.describe Gitlab::HookData::GroupBuilder do
let(:event) { :destroy }
it { expect(event_name).to eq('group_destroy') }
+
it_behaves_like 'includes the required attributes'
it_behaves_like 'does not include old path attributes'
end
@@ -54,6 +56,7 @@ RSpec.describe Gitlab::HookData::GroupBuilder do
let(:event) { :rename }
it { expect(event_name).to eq('group_rename') }
+
it_behaves_like 'includes the required attributes'
it 'includes old path details' do
diff --git a/spec/lib/gitlab/hook_data/group_member_builder_spec.rb b/spec/lib/gitlab/hook_data/group_member_builder_spec.rb
index 78c62fd23c7..35ce31ab897 100644
--- a/spec/lib/gitlab/hook_data/group_member_builder_spec.rb
+++ b/spec/lib/gitlab/hook_data/group_member_builder_spec.rb
@@ -39,6 +39,7 @@ RSpec.describe Gitlab::HookData::GroupMemberBuilder do
let(:event) { :create }
it { expect(event_name).to eq('user_add_to_group') }
+
it_behaves_like 'includes the required attributes'
end
@@ -46,6 +47,7 @@ RSpec.describe Gitlab::HookData::GroupMemberBuilder do
let(:event) { :update }
it { expect(event_name).to eq('user_update_for_group') }
+
it_behaves_like 'includes the required attributes'
end
@@ -53,6 +55,7 @@ RSpec.describe Gitlab::HookData::GroupMemberBuilder do
let(:event) { :destroy }
it { expect(event_name).to eq('user_remove_from_group') }
+
it_behaves_like 'includes the required attributes'
end
end
diff --git a/spec/lib/gitlab/hook_data/key_builder_spec.rb b/spec/lib/gitlab/hook_data/key_builder_spec.rb
index 86f33df115f..2c87c9a10e6 100644
--- a/spec/lib/gitlab/hook_data/key_builder_spec.rb
+++ b/spec/lib/gitlab/hook_data/key_builder_spec.rb
@@ -36,6 +36,7 @@ RSpec.describe Gitlab::HookData::KeyBuilder do
it { expect(event_name).to eq('key_create') }
it { expect(data[:username]).to eq(key.user.username) }
+
it_behaves_like 'includes the required attributes'
end
@@ -44,6 +45,7 @@ RSpec.describe Gitlab::HookData::KeyBuilder do
it { expect(event_name).to eq('key_destroy') }
it { expect(data[:username]).to eq(key.user.username) }
+
it_behaves_like 'includes the required attributes'
end
end
@@ -58,6 +60,7 @@ RSpec.describe Gitlab::HookData::KeyBuilder do
let(:event) { :create }
it { expect(event_name).to eq('key_create') }
+
it_behaves_like 'includes the required attributes'
end
@@ -65,6 +68,7 @@ RSpec.describe Gitlab::HookData::KeyBuilder do
let(:event) { :destroy }
it { expect(event_name).to eq('key_destroy') }
+
it_behaves_like 'includes the required attributes'
end
end
diff --git a/spec/lib/gitlab/hook_data/merge_request_builder_spec.rb b/spec/lib/gitlab/hook_data/merge_request_builder_spec.rb
index 25b84a67ab2..cb8fef60ab2 100644
--- a/spec/lib/gitlab/hook_data/merge_request_builder_spec.rb
+++ b/spec/lib/gitlab/hook_data/merge_request_builder_spec.rb
@@ -29,6 +29,7 @@ RSpec.describe Gitlab::HookData::MergeRequestBuilder do
merge_user_id
merge_when_pipeline_succeeds
milestone_id
+ reviewer_ids
source_branch
source_project_id
state_id
@@ -72,6 +73,7 @@ RSpec.describe Gitlab::HookData::MergeRequestBuilder do
human_time_estimate
assignee_ids
assignee_id
+ reviewer_ids
labels
state
blocking_discussions_resolved
diff --git a/spec/lib/gitlab/hook_data/project_builder_spec.rb b/spec/lib/gitlab/hook_data/project_builder_spec.rb
index e86ac66b1ad..729712510ea 100644
--- a/spec/lib/gitlab/hook_data/project_builder_spec.rb
+++ b/spec/lib/gitlab/hook_data/project_builder_spec.rb
@@ -52,6 +52,7 @@ RSpec.describe Gitlab::HookData::ProjectBuilder do
let(:event) { :create }
it { expect(event_name).to eq('project_create') }
+
it_behaves_like 'includes the required attributes'
it_behaves_like 'does not include `old_path_with_namespace` attribute'
end
@@ -60,6 +61,7 @@ RSpec.describe Gitlab::HookData::ProjectBuilder do
let(:event) { :destroy }
it { expect(event_name).to eq('project_destroy') }
+
it_behaves_like 'includes the required attributes'
it_behaves_like 'does not include `old_path_with_namespace` attribute'
end
@@ -68,6 +70,7 @@ RSpec.describe Gitlab::HookData::ProjectBuilder do
let(:event) { :rename }
it { expect(event_name).to eq('project_rename') }
+
it_behaves_like 'includes the required attributes'
it_behaves_like 'includes `old_path_with_namespace` attribute'
end
@@ -76,6 +79,7 @@ RSpec.describe Gitlab::HookData::ProjectBuilder do
let(:event) { :transfer }
it { expect(event_name).to eq('project_transfer') }
+
it_behaves_like 'includes the required attributes'
it_behaves_like 'includes `old_path_with_namespace` attribute'
end
diff --git a/spec/lib/gitlab/hook_data/project_member_builder_spec.rb b/spec/lib/gitlab/hook_data/project_member_builder_spec.rb
index 3fb84223581..76446adf7b7 100644
--- a/spec/lib/gitlab/hook_data/project_member_builder_spec.rb
+++ b/spec/lib/gitlab/hook_data/project_member_builder_spec.rb
@@ -37,6 +37,7 @@ RSpec.describe Gitlab::HookData::ProjectMemberBuilder do
let(:event) { :create }
it { expect(event_name).to eq('user_add_to_team') }
+
it_behaves_like 'includes the required attributes'
end
@@ -44,6 +45,7 @@ RSpec.describe Gitlab::HookData::ProjectMemberBuilder do
let(:event) { :update }
it { expect(event_name).to eq('user_update_for_team') }
+
it_behaves_like 'includes the required attributes'
end
@@ -51,6 +53,7 @@ RSpec.describe Gitlab::HookData::ProjectMemberBuilder do
let(:event) { :destroy }
it { expect(event_name).to eq('user_remove_from_team') }
+
it_behaves_like 'includes the required attributes'
end
end
diff --git a/spec/lib/gitlab/hook_data/subgroup_builder_spec.rb b/spec/lib/gitlab/hook_data/subgroup_builder_spec.rb
index 89e5dffd7b4..b25320af891 100644
--- a/spec/lib/gitlab/hook_data/subgroup_builder_spec.rb
+++ b/spec/lib/gitlab/hook_data/subgroup_builder_spec.rb
@@ -38,6 +38,7 @@ RSpec.describe Gitlab::HookData::SubgroupBuilder do
let(:event) { :create }
it { expect(event_name).to eq('subgroup_create') }
+
it_behaves_like 'includes the required attributes'
end
@@ -45,6 +46,7 @@ RSpec.describe Gitlab::HookData::SubgroupBuilder do
let(:event) { :destroy }
it { expect(event_name).to eq('subgroup_destroy') }
+
it_behaves_like 'includes the required attributes'
end
end
diff --git a/spec/lib/gitlab/hook_data/user_builder_spec.rb b/spec/lib/gitlab/hook_data/user_builder_spec.rb
index f971089850b..ae844308fb1 100644
--- a/spec/lib/gitlab/hook_data/user_builder_spec.rb
+++ b/spec/lib/gitlab/hook_data/user_builder_spec.rb
@@ -44,6 +44,7 @@ RSpec.describe Gitlab::HookData::UserBuilder do
let(:event) { :create }
it { expect(event_name).to eq('user_create') }
+
it_behaves_like 'includes the required attributes'
it_behaves_like 'does not include old username attributes'
it_behaves_like 'does not include state attributes'
@@ -53,6 +54,7 @@ RSpec.describe Gitlab::HookData::UserBuilder do
let(:event) { :destroy }
it { expect(event_name).to eq('user_destroy') }
+
it_behaves_like 'includes the required attributes'
it_behaves_like 'does not include old username attributes'
it_behaves_like 'does not include state attributes'
@@ -62,6 +64,7 @@ RSpec.describe Gitlab::HookData::UserBuilder do
let(:event) { :rename }
it { expect(event_name).to eq('user_rename') }
+
it_behaves_like 'includes the required attributes'
it_behaves_like 'does not include state attributes'
@@ -76,6 +79,7 @@ RSpec.describe Gitlab::HookData::UserBuilder do
let(:event) { :failed_login }
it { expect(event_name).to eq('user_failed_login') }
+
it_behaves_like 'includes the required attributes'
it_behaves_like 'does not include old username attributes'
diff --git a/spec/lib/gitlab/http_io_spec.rb b/spec/lib/gitlab/http_io_spec.rb
index 5ba0cb5e686..1376b726df3 100644
--- a/spec/lib/gitlab/http_io_spec.rb
+++ b/spec/lib/gitlab/http_io_spec.rb
@@ -262,7 +262,7 @@ RSpec.describe Gitlab::HttpIO do
end
it 'reads a trace' do
- expect { subject }.to raise_error(Gitlab::HttpIO::FailedToGetChunkError)
+ expect { subject }.to raise_error(Gitlab::HttpIO::FailedToGetChunkError, 'Unexpected response code: 500')
end
end
diff --git a/spec/lib/gitlab/import_export/after_export_strategies/web_upload_strategy_spec.rb b/spec/lib/gitlab/import_export/after_export_strategies/web_upload_strategy_spec.rb
index 451fd6c6f46..42cf9c54798 100644
--- a/spec/lib/gitlab/import_export/after_export_strategies/web_upload_strategy_spec.rb
+++ b/spec/lib/gitlab/import_export/after_export_strategies/web_upload_strategy_spec.rb
@@ -9,12 +9,21 @@ RSpec.describe Gitlab::ImportExport::AfterExportStrategies::WebUploadStrategy do
allow_next_instance_of(ProjectExportWorker) do |job|
allow(job).to receive(:jid).and_return(SecureRandom.hex(8))
end
+
+ stub_feature_flags(import_export_web_upload_stream: false)
+ stub_uploads_object_storage(FileUploader, enabled: false)
end
let(:example_url) { 'http://www.example.com' }
let(:strategy) { subject.new(url: example_url, http_method: 'post') }
- let!(:project) { create(:project, :with_export) }
- let!(:user) { build(:user) }
+ let(:user) { build(:user) }
+ let(:project) { import_export_upload.project }
+ let(:import_export_upload) do
+ create(
+ :import_export_upload,
+ export_file: fixture_file_upload('spec/fixtures/gitlab/import_export/lightweight_project_export.tar.gz')
+ )
+ end
subject { described_class }
@@ -36,20 +45,42 @@ RSpec.describe Gitlab::ImportExport::AfterExportStrategies::WebUploadStrategy do
describe '#execute' do
context 'when upload succeeds' do
before do
- allow(strategy).to receive(:send_file)
- allow(strategy).to receive(:handle_response_error)
+ stub_full_request(example_url, method: :post).to_return(status: 200)
end
- it 'does not remove the exported project file after the upload' do
+ it 'does not remove the exported project file after the upload', :aggregate_failures do
expect(project).not_to receive(:remove_exports)
- strategy.execute(user, project)
+ expect { strategy.execute(user, project) }.not_to change(project, :export_status)
+
+ expect(project.export_status).to eq(:finished)
end
- it 'has finished export status' do
- strategy.execute(user, project)
+ it 'logs when upload starts and finishes' do
+ export_size = import_export_upload.export_file.size
+
+ expect_next_instance_of(Gitlab::Export::Logger) do |logger|
+ expect(logger).to receive(:info).ordered.with(
+ {
+ message: "Started uploading project",
+ project_id: project.id,
+ project_name: project.name,
+ export_size: export_size
+ }
+ )
+
+ expect(logger).to receive(:info).ordered.with(
+ {
+ message: "Finished uploading project",
+ project_id: project.id,
+ project_name: project.name,
+ export_size: export_size,
+ upload_duration: anything
+ }
+ )
+ end
- expect(project.export_status).to eq(:finished)
+ strategy.execute(user, project)
end
end
@@ -64,5 +95,124 @@ RSpec.describe Gitlab::ImportExport::AfterExportStrategies::WebUploadStrategy do
expect(errors.first).to eq "Error uploading the project. Code 404: Page not found"
end
end
+
+ context 'when object store is disabled' do
+ it 'reads file from disk and uploads to external url' do
+ stub_request(:post, example_url).to_return(status: 200)
+ expect(Gitlab::ImportExport::RemoteStreamUpload).not_to receive(:new)
+ expect(Gitlab::HttpIO).not_to receive(:new)
+
+ strategy.execute(user, project)
+
+ expect(a_request(:post, example_url)).to have_been_made
+ end
+ end
+
+ context 'when object store is enabled' do
+ before do
+ object_store_url = 'http://object-storage/project.tar.gz'
+ stub_uploads_object_storage(FileUploader)
+ stub_request(:get, object_store_url)
+ stub_request(:post, example_url)
+ allow(import_export_upload.export_file).to receive(:url).and_return(object_store_url)
+ allow(import_export_upload.export_file).to receive(:file_storage?).and_return(false)
+ end
+
+ it 'reads file using Gitlab::HttpIO and uploads to external url' do
+ expect_next_instance_of(Gitlab::HttpIO) do |http_io|
+ expect(http_io).to receive(:read).and_call_original
+ end
+ expect(Gitlab::ImportExport::RemoteStreamUpload).not_to receive(:new)
+
+ strategy.execute(user, project)
+
+ expect(a_request(:post, example_url)).to have_been_made
+ end
+ end
+
+ context 'when `import_export_web_upload_stream` feature is enabled' do
+ before do
+ stub_feature_flags(import_export_web_upload_stream: true)
+ end
+
+ context 'when remote object store is disabled' do
+ it 'reads file from disk and uploads to external url' do
+ stub_request(:post, example_url).to_return(status: 200)
+ expect(Gitlab::ImportExport::RemoteStreamUpload).not_to receive(:new)
+ expect(Gitlab::HttpIO).not_to receive(:new)
+
+ strategy.execute(user, project)
+
+ expect(a_request(:post, example_url)).to have_been_made
+ end
+ end
+
+ context 'when object store is enabled' do
+ let(:object_store_url) { 'http://object-storage/project.tar.gz' }
+
+ before do
+ stub_uploads_object_storage(FileUploader)
+
+ allow(import_export_upload.export_file).to receive(:url).and_return(object_store_url)
+ allow(import_export_upload.export_file).to receive(:file_storage?).and_return(false)
+ end
+
+ it 'uploads file as a remote stream' do
+ arguments = {
+ download_url: object_store_url,
+ upload_url: example_url,
+ options: {
+ upload_method: :post,
+ upload_content_type: 'application/gzip'
+ }
+ }
+
+ expect_next_instance_of(Gitlab::ImportExport::RemoteStreamUpload, arguments) do |remote_stream_upload|
+ expect(remote_stream_upload).to receive(:execute)
+ end
+ expect(Gitlab::HttpIO).not_to receive(:new)
+
+ strategy.execute(user, project)
+ end
+
+ context 'when upload as remote stream raises an exception' do
+ before do
+ allow_next_instance_of(Gitlab::ImportExport::RemoteStreamUpload) do |remote_stream_upload|
+ allow(remote_stream_upload).to receive(:execute).and_raise(
+ Gitlab::ImportExport::RemoteStreamUpload::StreamError.new('Exception error message', 'Response body')
+ )
+ end
+ end
+
+ it 'logs the exception and stores the error message' do
+ expect_next_instance_of(Gitlab::Export::Logger) do |logger|
+ expect(logger).to receive(:error).ordered.with(
+ {
+ project_id: project.id,
+ project_name: project.name,
+ message: 'Exception error message',
+ response_body: 'Response body'
+ }
+ )
+
+ expect(logger).to receive(:error).ordered.with(
+ {
+ project_id: project.id,
+ project_name: project.name,
+ message: 'After export strategy failed',
+ 'exception.class' => 'Gitlab::ImportExport::RemoteStreamUpload::StreamError',
+ 'exception.message' => 'Exception error message',
+ 'exception.backtrace' => anything
+ }
+ )
+ end
+
+ strategy.execute(user, project)
+
+ expect(project.import_export_shared.errors.first).to eq('Exception error message')
+ end
+ end
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/import_export/all_models.yml b/spec/lib/gitlab/import_export/all_models.yml
index 8c1e60e78b0..9aec3271913 100644
--- a/spec/lib/gitlab/import_export/all_models.yml
+++ b/spec/lib/gitlab/import_export/all_models.yml
@@ -140,6 +140,12 @@ project_members:
- project
- member_task
- member_namespace
+- member_role
+member_roles:
+- members
+- namespace
+- base_access_level
+- download_code
merge_requests:
- status_check_responses
- subscriptions
@@ -591,6 +597,7 @@ project:
- alert_management_alerts
- repository_storage_moves
- freeze_periods
+- pumble_integration
- webex_teams_integration
- build_report_results
- vulnerability_statistic
@@ -621,6 +628,7 @@ project:
- security_trainings
- vulnerability_reads
- build_artifacts_size_refresh
+- project_callouts
award_emoji:
- awardable
- user
@@ -646,6 +654,11 @@ search_data:
merge_request_assignees:
- merge_request
- assignee
+- updated_state_by
+merge_request_reviewers:
+- merge_request
+- reviewer
+- updated_state_by
lfs_file_locks:
- user
project_badges:
@@ -805,3 +818,6 @@ bulk_import_export:
- group
service_desk_setting:
- file_template_project
+approvals:
+ - user
+ - merge_request
diff --git a/spec/lib/gitlab/import_export/base/relation_factory_spec.rb b/spec/lib/gitlab/import_export/base/relation_factory_spec.rb
index b8999f608b1..4ef8f4b5d76 100644
--- a/spec/lib/gitlab/import_export/base/relation_factory_spec.rb
+++ b/spec/lib/gitlab/import_export/base/relation_factory_spec.rb
@@ -139,6 +139,30 @@ RSpec.describe Gitlab::ImportExport::Base::RelationFactory do
expect(subject.value).to be_nil
end
end
+
+ context 'with duplicate assignees' do
+ let(:relation_sym) { :issues }
+ let(:relation_hash) do
+ { "title" => "title", "state" => "opened" }.merge(issue_assignees)
+ end
+
+ context 'when duplicate assignees are present' do
+ let(:issue_assignees) do
+ {
+ "issue_assignees" => [
+ IssueAssignee.new(user_id: 1),
+ IssueAssignee.new(user_id: 2),
+ IssueAssignee.new(user_id: 1),
+ { user_id: 3 }
+ ]
+ }
+ end
+
+ it 'removes duplicate assignees' do
+ expect(subject.issue_assignees.map(&:user_id)).to contain_exactly(1, 2)
+ end
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/import_export/base/relation_object_saver_spec.rb b/spec/lib/gitlab/import_export/base/relation_object_saver_spec.rb
index 7c84b9604a6..9f1b15aa049 100644
--- a/spec/lib/gitlab/import_export/base/relation_object_saver_spec.rb
+++ b/spec/lib/gitlab/import_export/base/relation_object_saver_spec.rb
@@ -58,8 +58,8 @@ RSpec.describe Gitlab::ImportExport::Base::RelationObjectSaver do
end
context 'when subrelation collection count is small' do
- let(:notes) { build_list(:note, 2, project: project, importing: true) }
- let(:relation_object) { build(:issue, project: project, notes: notes) }
+ let(:note) { build(:note, project: project, importing: true) }
+ let(:relation_object) { build(:issue, project: project, notes: [note]) }
let(:relation_definition) { { 'notes' => {} } }
it 'saves subrelation as part of the relation object itself' do
@@ -68,7 +68,7 @@ RSpec.describe Gitlab::ImportExport::Base::RelationObjectSaver do
saver.execute
issue = project.issues.last
- expect(issue.notes.count).to eq(2)
+ expect(issue.notes.count).to eq(1)
end
end
diff --git a/spec/lib/gitlab/import_export/decompressed_archive_size_validator_spec.rb b/spec/lib/gitlab/import_export/decompressed_archive_size_validator_spec.rb
index dea584e5019..9af72cc0dea 100644
--- a/spec/lib/gitlab/import_export/decompressed_archive_size_validator_spec.rb
+++ b/spec/lib/gitlab/import_export/decompressed_archive_size_validator_spec.rb
@@ -51,10 +51,11 @@ RSpec.describe Gitlab::ImportExport::DecompressedArchiveSizeValidator do
shared_examples 'logs raised exception and terminates validator process group' do
let(:std) { double(:std, close: nil, value: nil) }
let(:wait_thr) { double }
+ let(:wait_threads) { [wait_thr, wait_thr] }
before do
allow(Process).to receive(:getpgid).and_return(2)
- allow(Open3).to receive(:popen3).and_return([std, std, std, wait_thr])
+ allow(Open3).to receive(:pipeline_r).and_return([std, wait_threads])
allow(wait_thr).to receive(:[]).with(:pid).and_return(1)
allow(wait_thr).to receive(:value).and_raise(exception)
end
@@ -67,7 +68,7 @@ RSpec.describe Gitlab::ImportExport::DecompressedArchiveSizeValidator do
import_upload_archive_size: File.size(filepath),
message: error_message
)
- expect(Process).to receive(:kill).with(-1, 2)
+ expect(Process).to receive(:kill).with(-1, 2).twice
expect(subject.valid?).to eq(false)
end
end
diff --git a/spec/lib/gitlab/import_export/group/tree_restorer_spec.rb b/spec/lib/gitlab/import_export/group/tree_restorer_spec.rb
index 9b01005c2e9..89ae869ae86 100644
--- a/spec/lib/gitlab/import_export/group/tree_restorer_spec.rb
+++ b/spec/lib/gitlab/import_export/group/tree_restorer_spec.rb
@@ -204,19 +204,5 @@ RSpec.describe Gitlab::ImportExport::Group::TreeRestorer do
end
end
- context 'when import_relation_object_persistence feature flag is enabled' do
- before do
- stub_feature_flags(import_relation_object_persistence: true)
- end
-
- include_examples 'group restoration'
- end
-
- context 'when import_relation_object_persistence feature flag is disabled' do
- before do
- stub_feature_flags(import_relation_object_persistence: false)
- end
-
- include_examples 'group restoration'
- end
+ include_examples 'group restoration'
end
diff --git a/spec/lib/gitlab/import_export/import_test_coverage_spec.rb b/spec/lib/gitlab/import_export/import_test_coverage_spec.rb
index 90966cb4915..51c0008b2b4 100644
--- a/spec/lib/gitlab/import_export/import_test_coverage_spec.rb
+++ b/spec/lib/gitlab/import_export/import_test_coverage_spec.rb
@@ -88,8 +88,8 @@ RSpec.describe 'Test coverage of the Project Import' do
def relations_from_json(json_file)
json = Gitlab::Json.parse(IO.read(json_file))
- [].tap {|res| gather_relations({ project: json }, res, [])}
- .map {|relation_names| relation_names.join('.')}
+ [].tap { |res| gather_relations({ project: json }, res, []) }
+ .map { |relation_names| relation_names.join('.') }
end
def gather_relations(item, res, path)
@@ -103,7 +103,7 @@ RSpec.describe 'Test coverage of the Project Import' do
end
end
when Array
- item.each {|i| gather_relations(i, res, path)}
+ item.each { |i| gather_relations(i, res, path) }
end
end
diff --git a/spec/lib/gitlab/import_export/json/ndjson_writer_spec.rb b/spec/lib/gitlab/import_export/json/ndjson_writer_spec.rb
index 9be95591ae9..452d63d548e 100644
--- a/spec/lib/gitlab/import_export/json/ndjson_writer_spec.rb
+++ b/spec/lib/gitlab/import_export/json/ndjson_writer_spec.rb
@@ -41,7 +41,7 @@ RSpec.describe Gitlab::ImportExport::Json::NdjsonWriter do
file_path = File.join(path, exportable_path, "#{relation}.ndjson")
subject.write_relation(exportable_path, relation, values[0])
- expect {subject.write_relation(exportable_path, relation, values[1])}.to raise_exception("The #{file_path} already exist")
+ expect { subject.write_relation(exportable_path, relation, values[1]) }.to raise_exception("The #{file_path} already exist")
end
end
end
diff --git a/spec/lib/gitlab/import_export/json/streaming_serializer_spec.rb b/spec/lib/gitlab/import_export/json/streaming_serializer_spec.rb
index 3f73a730744..3088129a732 100644
--- a/spec/lib/gitlab/import_export/json/streaming_serializer_spec.rb
+++ b/spec/lib/gitlab/import_export/json/streaming_serializer_spec.rb
@@ -27,6 +27,7 @@ RSpec.describe Gitlab::ImportExport::Json::StreamingSerializer do
end
let(:exportable_path) { 'project' }
+ let(:logger) { Gitlab::Export::Logger.build }
let(:json_writer) { instance_double('Gitlab::ImportExport::Json::LegacyWriter') }
let(:hash) { { name: exportable.name, description: exportable.description }.stringify_keys }
let(:include) { [] }
@@ -42,7 +43,7 @@ RSpec.describe Gitlab::ImportExport::Json::StreamingSerializer do
end
subject do
- described_class.new(exportable, relations_schema, json_writer, exportable_path: exportable_path)
+ described_class.new(exportable, relations_schema, json_writer, exportable_path: exportable_path, logger: logger)
end
describe '#execute' do
@@ -73,6 +74,21 @@ RSpec.describe Gitlab::ImportExport::Json::StreamingSerializer do
subject.execute
end
+ it 'logs the relation name and the number of records to export' do
+ allow(json_writer).to receive(:write_relation_array)
+ allow(logger).to receive(:info)
+
+ subject.execute
+
+ expect(logger).to have_received(:info).with(
+ importer: 'Import/Export',
+ message: "Exporting issues relation. Number of records to export: 16",
+ project_id: exportable.id,
+ project_name: exportable.name,
+ project_path: exportable.full_path
+ )
+ end
+
context 'default relation ordering' do
it 'orders exported issues by primary key(:id)' do
expected_issues = exportable.issues.reorder(:id).map(&:to_json)
@@ -138,6 +154,21 @@ RSpec.describe Gitlab::ImportExport::Json::StreamingSerializer do
subject.execute
end
+
+ it 'logs the relation name' do
+ allow(json_writer).to receive(:write_relation)
+ allow(logger).to receive(:info)
+
+ subject.execute
+
+ expect(logger).to have_received(:info).with(
+ importer: 'Import/Export',
+ message: 'Exporting group relation',
+ project_id: exportable.id,
+ project_name: exportable.name,
+ project_path: exportable.full_path
+ )
+ end
end
context 'with array relation' do
@@ -155,6 +186,21 @@ RSpec.describe Gitlab::ImportExport::Json::StreamingSerializer do
subject.execute
end
+
+ it 'logs the relation name and the number of records to export' do
+ allow(json_writer).to receive(:write_relation_array)
+ allow(logger).to receive(:info)
+
+ subject.execute
+
+ expect(logger).to have_received(:info).with(
+ importer: 'Import/Export',
+ message: 'Exporting project_members relation. Number of records to export: 1',
+ project_id: exportable.id,
+ project_name: exportable.name,
+ project_path: exportable.full_path
+ )
+ end
end
describe 'load balancing' do
diff --git a/spec/lib/gitlab/import_export/log_util_spec.rb b/spec/lib/gitlab/import_export/log_util_spec.rb
new file mode 100644
index 00000000000..2b1a4b7bb61
--- /dev/null
+++ b/spec/lib/gitlab/import_export/log_util_spec.rb
@@ -0,0 +1,43 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::ImportExport::LogUtil do
+ describe '.exportable_to_log_payload' do
+ subject { described_class.exportable_to_log_payload(exportable) }
+
+ context 'when exportable is a group' do
+ let(:exportable) { build_stubbed(:group) }
+
+ it 'returns hash with group keys' do
+ expect(subject).to be_a(Hash)
+ expect(subject.keys).to eq(%i[group_id group_name group_path])
+ end
+ end
+
+ context 'when exportable is a project' do
+ let(:exportable) { build_stubbed(:project) }
+
+ it 'returns hash with project keys' do
+ expect(subject).to be_a(Hash)
+ expect(subject.keys).to eq(%i[project_id project_name project_path])
+ end
+ end
+
+ context 'when exportable is a new record' do
+ let(:exportable) { Project.new }
+
+ it 'returns empty hash' do
+ expect(subject).to eq({})
+ end
+ end
+
+ context 'when exportable is an unexpected type' do
+ let(:exportable) { build_stubbed(:issue) }
+
+ it 'returns empty hash' do
+ expect(subject).to eq({})
+ end
+ end
+ end
+end
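Note on the new LogUtil spec above: the examples pin the payload to the exportable's type and to whether it is persisted. A minimal sketch consistent with those expectations (an illustration only, not the shipped Gitlab::ImportExport::LogUtil code) would look like:

  module LogUtilSketch
    # Build a log payload whose keys depend on the exportable's type;
    # unsaved records and unexpected types yield an empty hash.
    def self.exportable_to_log_payload(exportable)
      return {} unless exportable.persisted?

      case exportable
      when Group
        { group_id: exportable.id, group_name: exportable.name, group_path: exportable.full_path }
      when Project
        { project_id: exportable.id, project_name: exportable.name, project_path: exportable.full_path }
      else
        {}
      end
    end
  end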
diff --git a/spec/lib/gitlab/import_export/project/relation_saver_spec.rb b/spec/lib/gitlab/import_export/project/relation_saver_spec.rb
new file mode 100644
index 00000000000..dec51b3afd1
--- /dev/null
+++ b/spec/lib/gitlab/import_export/project/relation_saver_spec.rb
@@ -0,0 +1,125 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::ImportExport::Project::RelationSaver do
+ include ImportExport::CommonUtil
+
+ subject(:relation_saver) { described_class.new(project: project, shared: shared, relation: relation) }
+
+ let_it_be(:export_path) { "#{Dir.tmpdir}/project_tree_saver_spec" }
+ let_it_be(:project) { setup_project }
+
+ let(:relation) { Projects::ImportExport::RelationExport::ROOT_RELATION }
+ let(:shared) do
+ shared = project.import_export_shared
+ allow(shared).to receive(:export_path).and_return(export_path)
+ shared
+ end
+
+ after do
+ FileUtils.rm_rf(export_path)
+ end
+
+ describe '#save' do
+ context 'when relation is the root node' do
+ let(:relation) { Projects::ImportExport::RelationExport::ROOT_RELATION }
+
+ it 'serializes the root node as a json file in the export path' do
+ relation_saver.save # rubocop:disable Rails/SaveBang
+
+ json = read_json(File.join(shared.export_path, 'project.json'))
+ expect(json).to include({ 'description' => 'Project description' })
+ end
+
+ it 'serializes only allowed attributes' do
+ relation_saver.save # rubocop:disable Rails/SaveBang
+
+ json = read_json(File.join(shared.export_path, 'project.json'))
+ expect(json).to include({ 'description' => 'Project description' })
+ expect(json.keys).not_to include('name')
+ end
+
+ it 'successfully serializes without errors' do
+ result = relation_saver.save # rubocop:disable Rails/SaveBang
+
+ expect(result).to eq(true)
+ expect(shared.errors).to be_empty
+ end
+ end
+
+ context 'when relation is a child node' do
+ let(:relation) { 'labels' }
+
+ it 'serializes the child node as an ndjson file in the export path inside the project folder' do
+ relation_saver.save # rubocop:disable Rails/SaveBang
+
+ ndjson = read_ndjson(File.join(shared.export_path, 'project', "#{relation}.ndjson"))
+ expect(ndjson.first).to include({ 'title' => 'Label 1' })
+ expect(ndjson.second).to include({ 'title' => 'Label 2' })
+ end
+
+ it 'serializes only allowed attributes' do
+ relation_saver.save # rubocop:disable Rails/SaveBang
+
+ ndjson = read_ndjson(File.join(shared.export_path, 'project', "#{relation}.ndjson"))
+ expect(ndjson.first.keys).not_to include('description_html')
+ end
+
+ it 'successfully serializes without errors' do
+ result = relation_saver.save # rubocop:disable Rails/SaveBang
+
+ expect(result).to eq(true)
+ expect(shared.errors).to be_empty
+ end
+ end
+
+ context 'when relation name is not supported' do
+ let(:relation) { 'unknown' }
+
+ it 'returns false and registers the error' do
+ result = relation_saver.save # rubocop:disable Rails/SaveBang
+
+ expect(result).to eq(false)
+ expect(shared.errors).to be_present
+ end
+ end
+
+ context 'when an exception occurs during serialization' do
+ it 'returns false and registers the exception error message' do
+ allow_next_instance_of(Gitlab::ImportExport::Json::StreamingSerializer) do |serializer|
+ allow(serializer).to receive(:serialize_root).and_raise('Error!')
+ end
+
+ result = relation_saver.save # rubocop:disable Rails/SaveBang
+
+ expect(result).to eq(false)
+ expect(shared.errors).to include('Error!')
+ end
+ end
+ end
+
+ def setup_project
+ project = create(:project,
+ description: 'Project description'
+ )
+
+ create(:label, project: project, title: 'Label 1')
+ create(:label, project: project, title: 'Label 2')
+
+ project
+ end
+
+ def read_json(path)
+ Gitlab::Json.parse(IO.read(path))
+ end
+
+ def read_ndjson(path)
+ relations = []
+ File.foreach(path) do |line|
+ json = Gitlab::Json.parse(line)
+ relations << json
+ end
+ relations
+ end
+end
diff --git a/spec/lib/gitlab/import_export/project/tree_restorer_spec.rb b/spec/lib/gitlab/import_export/project/tree_restorer_spec.rb
index 157cd408da9..47d7555c8f4 100644
--- a/spec/lib/gitlab/import_export/project/tree_restorer_spec.rb
+++ b/spec/lib/gitlab/import_export/project/tree_restorer_spec.rb
@@ -254,6 +254,16 @@ RSpec.describe Gitlab::ImportExport::Project::TreeRestorer do
end
end
+ it 'has multiple merge request assignees' do
+ expect(MergeRequest.find_by(title: 'MR1').assignees).to contain_exactly(@user, *@existing_members)
+ expect(MergeRequest.find_by(title: 'MR2').assignees).to be_empty
+ end
+
+ it 'has multiple merge request reviewers' do
+ expect(MergeRequest.find_by(title: 'MR1').reviewers).to contain_exactly(@user, *@existing_members)
+ expect(MergeRequest.find_by(title: 'MR2').reviewers).to be_empty
+ end
+
it 'has labels associated to label links, associated to issues' do
expect(Label.first.label_links.first.target).not_to be_nil
end
@@ -262,6 +272,11 @@ RSpec.describe Gitlab::ImportExport::Project::TreeRestorer do
expect(ProjectLabel.count).to eq(3)
end
+ it 'has merge request approvals' do
+ expect(MergeRequest.find_by(title: 'MR1').approvals.pluck(:user_id)).to contain_exactly(@user.id, *@existing_members.map(&:id))
+ expect(MergeRequest.find_by(title: 'MR2').approvals).to be_empty
+ end
+
it 'has no group labels' do
expect(GroupLabel.count).to eq(0)
end
@@ -589,7 +604,7 @@ RSpec.describe Gitlab::ImportExport::Project::TreeRestorer do
it 'issue system note metadata restored successfully' do
note_content = 'created merge request !1 to address this issue'
- note = project.issues.first.notes.find { |n| n.note.match(/#{note_content}/)}
+ note = project.issues.first.notes.find { |n| n.note.match(/#{note_content}/) }
expect(note.noteable_type).to eq('Issue')
expect(note.system).to eq(true)
@@ -1085,35 +1100,13 @@ RSpec.describe Gitlab::ImportExport::Project::TreeRestorer do
end
end
- context 'when import_relation_object_persistence feature flag is enabled' do
- before do
- stub_feature_flags(import_relation_object_persistence: true)
- end
-
- context 'enable ndjson import' do
- it_behaves_like 'project tree restorer work properly', :legacy_reader, true
+ context 'enable ndjson import' do
+ it_behaves_like 'project tree restorer work properly', :legacy_reader, true
- it_behaves_like 'project tree restorer work properly', :ndjson_reader, true
- end
-
- context 'disable ndjson import' do
- it_behaves_like 'project tree restorer work properly', :legacy_reader, false
- end
+ it_behaves_like 'project tree restorer work properly', :ndjson_reader, true
end
- context 'when import_relation_object_persistence feature flag is disabled' do
- before do
- stub_feature_flags(import_relation_object_persistence: false)
- end
-
- context 'enable ndjson import' do
- it_behaves_like 'project tree restorer work properly', :legacy_reader, true
-
- it_behaves_like 'project tree restorer work properly', :ndjson_reader, true
- end
-
- context 'disable ndjson import' do
- it_behaves_like 'project tree restorer work properly', :legacy_reader, false
- end
+ context 'disable ndjson import' do
+ it_behaves_like 'project tree restorer work properly', :legacy_reader, false
end
end
diff --git a/spec/lib/gitlab/import_export/project/tree_saver_spec.rb b/spec/lib/gitlab/import_export/project/tree_saver_spec.rb
index ba781ae78b7..15108d28bf2 100644
--- a/spec/lib/gitlab/import_export/project/tree_saver_spec.rb
+++ b/spec/lib/gitlab/import_export/project/tree_saver_spec.rb
@@ -68,6 +68,7 @@ RSpec.describe Gitlab::ImportExport::Project::TreeSaver do
it 'has merge request\'s milestones' do
expect(subject.first['milestone']).not_to be_empty
end
+
it 'has merge request\'s source branch SHA' do
expect(subject.first['source_branch_sha']).to eq('b83d6e391c22777fca1ed3012fce84f633d7fed0')
end
@@ -100,9 +101,30 @@ RSpec.describe Gitlab::ImportExport::Project::TreeSaver do
expect(subject.first['notes'].first['author']).not_to be_empty
end
+ it 'has merge request approvals' do
+ approval = subject.first['approvals'].first
+
+ expect(approval).not_to be_nil
+ expect(approval['user_id']).to eq(user.id)
+ end
+
it 'has merge request resource label events' do
expect(subject.first['resource_label_events']).not_to be_empty
end
+
+ it 'has merge request assignees' do
+ assignee = subject.first['merge_request_assignees'].first
+
+ expect(assignee).not_to be_nil
+ expect(assignee['user_id']).to eq(user.id)
+ end
+
+ it 'has merge request reviewers' do
+ reviewer = subject.first['merge_request_reviewers'].first
+
+ expect(reviewer).not_to be_nil
+ expect(reviewer['user_id']).to eq(user.id)
+ end
end
context 'with snippets' do
@@ -404,7 +426,7 @@ RSpec.describe Gitlab::ImportExport::Project::TreeSaver do
context 'when streaming has to retry', :aggregate_failures do
let(:shared) { double('shared', export_path: exportable_path) }
- let(:logger) { Gitlab::Import::Logger.build }
+ let(:logger) { Gitlab::Export::Logger.build }
let(:serializer) { double('serializer') }
let(:error_class) { Net::OpenTimeout }
let(:info_params) do
@@ -468,7 +490,8 @@ RSpec.describe Gitlab::ImportExport::Project::TreeSaver do
create(:label_link, label: group_label, target: issue)
create(:label_priority, label: group_label, priority: 1)
milestone = create(:milestone, project: project)
- merge_request = create(:merge_request, source_project: project, milestone: milestone)
+ merge_request = create(:merge_request, source_project: project, milestone: milestone, assignees: [user], reviewers: [user])
+ create(:approval, merge_request: merge_request, user: user)
ci_build = create(:ci_build, project: project, when: nil)
ci_build.pipeline.update!(project: project)
diff --git a/spec/lib/gitlab/import_export/remote_stream_upload_spec.rb b/spec/lib/gitlab/import_export/remote_stream_upload_spec.rb
new file mode 100644
index 00000000000..b1bc6b7eeaf
--- /dev/null
+++ b/spec/lib/gitlab/import_export/remote_stream_upload_spec.rb
@@ -0,0 +1,232 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::ImportExport::RemoteStreamUpload do
+ include StubRequests
+
+ subject do
+ described_class.new(
+ download_url: download_url,
+ upload_url: upload_url,
+ options: {
+ upload_method: upload_method,
+ upload_content_type: upload_content_type
+ }
+ )
+ end
+
+ let(:download_url) { 'http://object-storage/file.txt' }
+ let(:upload_url) { 'http://example.com/file.txt' }
+ let(:upload_method) { :post }
+ let(:upload_content_type) { 'text/plain' }
+
+ describe '#execute' do
+ context 'when download request and upload request return 200' do
+ it 'uploads the downloaded content' do
+ stub_request(:get, download_url).to_return(status: 200, body: 'ABC', headers: { 'Content-Length' => 3 })
+ stub_request(:post, upload_url)
+
+ subject.execute
+
+ expect(
+ a_request(:post, upload_url).with(
+ body: 'ABC', headers: { 'Content-Length' => 3, 'Content-Type' => 'text/plain' }
+ )
+ ).to have_been_made
+ end
+ end
+
+ context 'when upload method is put' do
+ let(:upload_method) { :put }
+
+ it 'uploads using the put method' do
+ stub_request(:get, download_url).to_return(status: 200, body: 'ABC', headers: { 'Content-Length' => 3 })
+ stub_request(:put, upload_url)
+
+ subject.execute
+
+ expect(
+ a_request(:put, upload_url).with(
+ body: 'ABC', headers: { 'Content-Length' => 3, 'Content-Type' => 'text/plain' }
+ )
+ ).to have_been_made
+ end
+ end
+
+ context 'when download request does not return 200' do
+ it do
+ stub_request(:get, download_url).to_return(status: 404)
+
+ expect { subject.execute }.to raise_error(
+ Gitlab::ImportExport::RemoteStreamUpload::StreamError,
+ "Invalid response code while downloading file. Code: 404"
+ )
+ end
+ end
+
+ context 'when upload request does not return 200' do
+ it do
+ stub_request(:get, download_url).to_return(status: 200, body: 'ABC', headers: { 'Content-Length' => 3 })
+ stub_request(:post, upload_url).to_return(status: 403)
+
+ expect { subject.execute }.to raise_error(
+ Gitlab::ImportExport::RemoteStreamUpload::StreamError,
+ "Invalid response code while uploading file. Code: 403"
+ )
+ end
+ end
+
+ context 'when download URL is a local address' do
+ let(:download_url) { 'http://127.0.0.1/file.txt' }
+
+ before do
+ stub_request(:get, download_url)
+ stub_request(:post, upload_url)
+ end
+
+ it 'raises error' do
+ expect { subject.execute }.to raise_error(
+ Gitlab::HTTP::BlockedUrlError,
+ "URL 'http://127.0.0.1/file.txt' is blocked: Requests to localhost are not allowed"
+ )
+ end
+
+ context 'when local requests are allowed' do
+ before do
+ stub_application_setting(allow_local_requests_from_web_hooks_and_services: true)
+ end
+
+ it 'does not raise an error' do
+ expect { subject.execute }.not_to raise_error
+ end
+ end
+ end
+
+ context 'when download URL is a local network address' do
+ let(:download_url) { 'http://172.16.0.0/file.txt' }
+
+ before do
+ stub_request(:get, download_url)
+ stub_request(:post, upload_url)
+ end
+
+ it 'raises error' do
+ expect { subject.execute }.to raise_error(
+ Gitlab::HTTP::BlockedUrlError,
+ "URL 'http://172.16.0.0/file.txt' is blocked: Requests to the local network are not allowed"
+ )
+ end
+
+ context 'when local network requests are allowed' do
+ before do
+ stub_application_setting(allow_local_requests_from_web_hooks_and_services: true)
+ end
+
+ it 'does not raise an error' do
+ expect { subject.execute }.not_to raise_error
+ end
+ end
+ end
+
+ context 'when upload URL is a local address' do
+ let(:upload_url) { 'http://127.0.0.1/file.txt' }
+
+ before do
+ stub_request(:get, download_url)
+ stub_request(:post, upload_url)
+ end
+
+ it 'raises error' do
+ stub_request(:get, download_url)
+
+ expect { subject.execute }.to raise_error(
+ Gitlab::HTTP::BlockedUrlError,
+ "URL 'http://127.0.0.1/file.txt' is blocked: Requests to localhost are not allowed"
+ )
+ end
+
+ context 'when local requests are allowed' do
+ before do
+ stub_application_setting(allow_local_requests_from_web_hooks_and_services: true)
+ end
+
+ it 'does not raise an error' do
+ expect { subject.execute }.not_to raise_error
+ end
+ end
+ end
+
+ context 'when upload URL is a local network address' do
+ let(:upload_url) { 'http://172.16.0.0/file.txt' }
+
+ before do
+ stub_request(:get, download_url)
+ stub_request(:post, upload_url)
+ end
+
+ it 'raises error' do
+ expect { subject.execute }.to raise_error(
+ Gitlab::HTTP::BlockedUrlError,
+ "URL 'http://172.16.0.0/file.txt' is blocked: Requests to the local network are not allowed"
+ )
+ end
+
+ context 'when local network requests are allowed' do
+ before do
+ stub_application_setting(allow_local_requests_from_web_hooks_and_services: true)
+ end
+
+ it 'does not raise an error' do
+ expect { subject.execute }.not_to raise_error
+ end
+ end
+ end
+
+ context 'when upload URL resolves to a local address' do
+ let(:upload_url) { 'http://example.com/file.txt' }
+
+ it 'raises error' do
+ stub_request(:get, download_url)
+ stub_full_request(upload_url, ip_address: '127.0.0.1', method: upload_method)
+
+ expect { subject.execute }.to raise_error(
+ Gitlab::HTTP::BlockedUrlError,
+ "URL 'http://example.com/file.txt' is blocked: Requests to localhost are not allowed"
+ )
+ end
+ end
+ end
+
+ describe Gitlab::ImportExport::RemoteStreamUpload::ChunkStream do
+ describe 'StringIO#copy_stream compatibility' do
+ it 'copies all chunks' do
+ chunks = %w[ABC EFD].to_enum
+ chunk_stream = described_class.new(chunks)
+ new_stream = StringIO.new
+
+ IO.copy_stream(chunk_stream, new_stream)
+ new_stream.rewind
+
+ expect(new_stream.read).to eq('ABCEFD')
+ end
+
+ context 'with chunks smaller and bigger than buffer size' do
+ before do
+ stub_const('Gitlab::ImportExport::RemoteStreamUpload::ChunkStream::DEFAULT_BUFFER_SIZE', 4)
+ end
+
+ it 'copies all chunks' do
+ chunks = %w[A BC DEF GHIJ KLMNOPQ RSTUVWXYZ].to_enum
+ chunk_stream = described_class.new(chunks)
+ new_stream = StringIO.new
+
+ IO.copy_stream(chunk_stream, new_stream)
+ new_stream.rewind
+
+ expect(new_stream.read).to eq('ABCDEFGHIJKLMNOPQRSTUVWXYZ')
+ end
+ end
+ end
+ end
+end
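The ChunkStream examples above only assert IO.copy_stream compatibility: the object is handed an enumerator of string chunks and must behave like a readable stream. A self-contained sketch of such an adapter (hypothetical, not the class under test) is:

  require 'stringio'

  class ChunkStreamSketch
    def initialize(chunks)
      @chunks = chunks # an Enumerator yielding strings
      @buffer = +''
    end

    # IO.copy_stream calls read(length, outbuf) on non-IO sources and
    # treats a nil return value as EOF.
    def read(length, outbuf = +'')
      fill_buffer(length)
      return nil if @buffer.empty?

      outbuf.replace(@buffer.slice!(0, length))
    end

    private

    def fill_buffer(length)
      @buffer << @chunks.next while @buffer.bytesize < length
    rescue StopIteration
      # enumerator exhausted; serve whatever is still buffered
    end
  end

  stream = ChunkStreamSketch.new(%w[ABC EFD].to_enum)
  target = StringIO.new
  IO.copy_stream(stream, target)
  target.string # => "ABCEFD"

The internal buffering also covers the second example in the spec, where chunks are both smaller and larger than the copy buffer size.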
diff --git a/spec/lib/gitlab/import_export/safe_model_attributes.yml b/spec/lib/gitlab/import_export/safe_model_attributes.yml
index bd60bb53d49..6cfc24a8996 100644
--- a/spec/lib/gitlab/import_export/safe_model_attributes.yml
+++ b/spec/lib/gitlab/import_export/safe_model_attributes.yml
@@ -521,7 +521,6 @@ Project:
- star_count
- ci_id
- shared_runners_enabled
-- build_coverage_regex
- build_allow_git_fetchs
- build_timeout
- pending_delete
@@ -584,6 +583,9 @@ ProjectFeature:
- security_and_compliance_access_level
- container_registry_access_level
- package_registry_access_level
+- environments_access_level
+- feature_flags_access_level
+- releases_access_level
- created_at
- updated_at
ProtectedBranch::MergeAccessLevel:
@@ -741,6 +743,14 @@ MergeRequestAssignee:
- id
- user_id
- merge_request_id
+- created_at
+- state
+MergeRequestReviewer:
+- id
+- user_id
+- merge_request_id
+- created_at
+- state
ProjectMetricsSetting:
- project_id
- external_dashboard_url
@@ -903,3 +913,7 @@ MergeRequest::CleanupSchedule:
- completed_at
- created_at
- updated_at
+Approval:
+ - user_id
+ - created_at
+ - updated_at
diff --git a/spec/lib/gitlab/import_export/shared_spec.rb b/spec/lib/gitlab/import_export/shared_spec.rb
index 1945156ca59..408ed3a2176 100644
--- a/spec/lib/gitlab/import_export/shared_spec.rb
+++ b/spec/lib/gitlab/import_export/shared_spec.rb
@@ -68,12 +68,18 @@ RSpec.describe Gitlab::ImportExport::Shared do
expect(subject.errors).to eq(['Error importing into [FILTERED] Permission denied @ unlink_internal - [FILTERED]'])
end
- it 'updates the import JID' do
+ it 'tracks the exception' do
import_state = create(:import_state, project: project, jid: 'jid-test')
expect(Gitlab::ErrorTracking)
.to receive(:track_exception)
- .with(error, hash_including(import_jid: import_state.jid))
+ .with(error, hash_including(
+ importer: 'Import/Export',
+ project_id: project.id,
+ project_name: project.name,
+ project_path: project.full_path,
+ import_jid: import_state.jid
+ ))
subject.error(error)
end
diff --git a/spec/lib/gitlab/import_export/version_checker_spec.rb b/spec/lib/gitlab/import_export/version_checker_spec.rb
index 9e69e04b17c..14c62edb786 100644
--- a/spec/lib/gitlab/import_export/version_checker_spec.rb
+++ b/spec/lib/gitlab/import_export/version_checker_spec.rb
@@ -30,7 +30,7 @@ RSpec.describe Gitlab::ImportExport::VersionChecker do
end
context 'newer version' do
- let(:version) { '900.0'}
+ let(:version) { '900.0' }
it 'returns false if export version is newer' do
expect(described_class.check!(shared: shared)).to be false
diff --git a/spec/lib/gitlab/instrumentation_helper_spec.rb b/spec/lib/gitlab/instrumentation_helper_spec.rb
index 79d626386d4..4fa9079144d 100644
--- a/spec/lib/gitlab/instrumentation_helper_spec.rb
+++ b/spec/lib/gitlab/instrumentation_helper_spec.rb
@@ -195,6 +195,28 @@ RSpec.describe Gitlab::InstrumentationHelper do
expect(payload[:uploaded_file_size_bytes]).to eq(uploaded_file.size)
end
end
+
+ context 'when an api call to the search api is made' do
+ before do
+ Gitlab::Instrumentation::GlobalSearchApi.set_information(
+ type: 'basic',
+ level: 'global',
+ scope: 'issues',
+ search_duration_s: 0.1
+ )
+ end
+
+ it 'adds search data' do
+ subject
+
+ expect(payload).to include({
+ 'meta.search.type' => 'basic',
+ 'meta.search.level' => 'global',
+ 'meta.search.scope' => 'issues',
+ global_search_duration_s: 0.1
+ })
+ end
+ end
end
describe 'duration calculations' do
diff --git a/spec/lib/gitlab/jira/dvcs_spec.rb b/spec/lib/gitlab/jira/dvcs_spec.rb
index 09e777b38ea..76d81343875 100644
--- a/spec/lib/gitlab/jira/dvcs_spec.rb
+++ b/spec/lib/gitlab/jira/dvcs_spec.rb
@@ -24,8 +24,8 @@ RSpec.describe Gitlab::Jira::Dvcs do
end
describe '.encode_project_name' do
- let(:group) { create(:group)}
- let(:project) { create(:project, group: group)}
+ let(:group) { create(:group) }
+ let(:project) { create(:project, group: group) }
context 'root group' do
it 'returns project path' do
@@ -34,7 +34,7 @@ RSpec.describe Gitlab::Jira::Dvcs do
end
context 'nested group' do
- let(:group) { create(:group, :nested)}
+ let(:group) { create(:group, :nested) }
it 'returns encoded project full path' do
expect(described_class.encode_project_name(project)).to eq(described_class.encode_slash(project.full_path))
diff --git a/spec/lib/gitlab/jira_import/issues_importer_spec.rb b/spec/lib/gitlab/jira_import/issues_importer_spec.rb
index 1bc052ee0b6..a2a482dde7c 100644
--- a/spec/lib/gitlab/jira_import/issues_importer_spec.rb
+++ b/spec/lib/gitlab/jira_import/issues_importer_spec.rb
@@ -40,7 +40,7 @@ RSpec.describe Gitlab::JiraImport::IssuesImporter do
context 'with results returned' do
jira_issue = Struct.new(:id)
- let_it_be(:jira_issues) { [jira_issue.new(1), jira_issue.new(2)] }
+ let_it_be(:jira_issues) { [jira_issue.new(1), jira_issue.new(2), jira_issue.new(3)] }
def mock_issue_serializer(count, raise_exception_on_even_mocks: false)
serializer = instance_double(Gitlab::JiraImport::IssueSerializer, execute: { key: 'data' })
@@ -125,6 +125,47 @@ RSpec.describe Gitlab::JiraImport::IssuesImporter do
expect(Gitlab::JiraImport.get_issues_next_start_at(project.id)).to eq(2)
end
end
+
+ context 'when number of issues is above the threshold' do
+ before do
+ stub_const("#{described_class.name}::JIRA_IMPORT_THRESHOLD", 2)
+ stub_const("#{described_class.name}::JIRA_IMPORT_PAUSE_LIMIT", 1)
+ allow(Gitlab::ErrorTracking).to receive(:track_exception)
+ allow_next_instance_of(Gitlab::JobWaiter) do |job_waiter|
+ allow(job_waiter).to receive(:wait).with(5).and_return(job_waiter.wait(0.1))
+ end
+ end
+
+ it 'schedules 3 import jobs with two pause points' do
+ expect(subject).to receive(:fetch_issues).with(0).and_return([jira_issues[0], jira_issues[1], jira_issues[2]])
+ expect(Gitlab::JiraImport::ImportIssueWorker).to receive(:perform_async).exactly(3).times
+ expect(Gitlab::JiraImport::ImportIssueWorker)
+ .to receive(:queue_size)
+ .exactly(6).times
+ .and_return(1, 2, 3, 2, 1, 0)
+
+ mock_issue_serializer(3)
+
+ expect(subject.execute).to have_received(:wait).with(5).twice
+ end
+
+ it 'tracks the exception if the queue size does not reduce' do
+ expect(subject).to receive(:fetch_issues).with(0).and_return([jira_issues[0]])
+ expect(Gitlab::JiraImport::ImportIssueWorker).not_to receive(:perform_async)
+ expect(Gitlab::JiraImport::ImportIssueWorker)
+ .to receive(:queue_size)
+ .exactly(11).times
+ .and_return(3)
+
+ mock_issue_serializer(1)
+
+ expect(subject.execute).to have_received(:wait).with(5).exactly(10).times
+ expect(Gitlab::ErrorTracking)
+ .to have_received(:track_exception)
+ .with(described_class::RetriesExceededError, { project_id: project.id })
+ .once
+ end
+ end
end
end
end
diff --git a/spec/lib/gitlab/kubernetes/rollout_status_spec.rb b/spec/lib/gitlab/kubernetes/rollout_status_spec.rb
index 8ed9fdd799c..21d345f0739 100644
--- a/spec/lib/gitlab/kubernetes/rollout_status_spec.rb
+++ b/spec/lib/gitlab/kubernetes/rollout_status_spec.rb
@@ -213,7 +213,7 @@ RSpec.describe Gitlab::Kubernetes::RolloutStatus do
let(:specs) { specs_half_finished }
- it { is_expected.to be_falsy}
+ it { is_expected.to be_falsy }
end
end
diff --git a/spec/lib/gitlab/mail_room/mail_room_spec.rb b/spec/lib/gitlab/mail_room/mail_room_spec.rb
index 06a25be757e..0c2c9b89005 100644
--- a/spec/lib/gitlab/mail_room/mail_room_spec.rb
+++ b/spec/lib/gitlab/mail_room/mail_room_spec.rb
@@ -246,7 +246,7 @@ RSpec.describe Gitlab::MailRoom do
redis_url: "localhost",
redis_db: 99,
namespace: "resque:gitlab",
- queue: "email_receiver",
+ queue: "default",
worker: "EmailReceiverWorker",
sentinels: [{ host: "localhost", port: 1234 }]
}
@@ -259,7 +259,7 @@ RSpec.describe Gitlab::MailRoom do
redis_url: "localhost",
redis_db: 99,
namespace: "resque:gitlab",
- queue: "service_desk_email_receiver",
+ queue: "default",
worker: "ServiceDeskEmailReceiverWorker",
sentinels: [{ host: "localhost", port: 1234 }]
}
diff --git a/spec/lib/gitlab/memory/jemalloc_spec.rb b/spec/lib/gitlab/memory/jemalloc_spec.rb
index 8847516b52c..482ac6e5802 100644
--- a/spec/lib/gitlab/memory/jemalloc_spec.rb
+++ b/spec/lib/gitlab/memory/jemalloc_spec.rb
@@ -28,11 +28,12 @@ RSpec.describe Gitlab::Memory::Jemalloc do
describe '.dump_stats' do
it 'writes stats JSON file' do
- described_class.dump_stats(path: outdir, format: format)
+ file_path = described_class.dump_stats(path: outdir, format: format)
file = Dir.entries(outdir).find { |e| e.match(/jemalloc_stats\.#{$$}\.\d+\.json$/) }
expect(file).not_to be_nil
- expect(File.read(File.join(outdir, file))).to eq(output)
+ expect(file_path).to eq(File.join(outdir, file))
+ expect(File.read(file_path)).to eq(output)
end
end
end
@@ -52,12 +53,22 @@ RSpec.describe Gitlab::Memory::Jemalloc do
end
describe '.dump_stats' do
- it 'writes stats text file' do
- described_class.dump_stats(path: outdir, format: format)
+ shared_examples 'writes stats text file' do |filename_label, filename_pattern|
+ it do
+ described_class.dump_stats(path: outdir, format: format, filename_label: filename_label)
+
+ file = Dir.entries(outdir).find { |e| e.match(filename_pattern) }
+ expect(file).not_to be_nil
+ expect(File.read(File.join(outdir, file))).to eq(output)
+ end
+ end
- file = Dir.entries(outdir).find { |e| e.match(/jemalloc_stats\.#{$$}\.\d+\.txt$/) }
- expect(file).not_to be_nil
- expect(File.read(File.join(outdir, file))).to eq(output)
+ context 'when custom filename label is passed' do
+ include_examples 'writes stats text file', 'puma_0', /jemalloc_stats\.#{$$}\.puma_0\.\d+\.txt$/
+ end
+
+ context 'when custom filename label is not passed' do
+ include_examples 'writes stats text file', nil, /jemalloc_stats\.#{$$}\.\d+\.txt$/
end
end
end
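For orientation, the filename patterns asserted above (jemalloc_stats.<pid>.<label>.<timestamp>.txt, with the label segment omitted when no label is passed) could be produced by a builder along these lines; the helper name and signature are assumptions for illustration only:

  def stats_filename(extension, filename_label = nil)
    [
      'jemalloc_stats',
      Process.pid,
      filename_label, # e.g. 'puma_0'; dropped when nil
      Time.now.to_i
    ].compact.join('.') + ".#{extension}"
  end

  stats_filename('txt')           # => "jemalloc_stats.<pid>.<timestamp>.txt"
  stats_filename('txt', 'puma_0') # => "jemalloc_stats.<pid>.puma_0.<timestamp>.txt"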
diff --git a/spec/lib/gitlab/memory/reports/jemalloc_stats_spec.rb b/spec/lib/gitlab/memory/reports/jemalloc_stats_spec.rb
new file mode 100644
index 00000000000..53fae48776b
--- /dev/null
+++ b/spec/lib/gitlab/memory/reports/jemalloc_stats_spec.rb
@@ -0,0 +1,124 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Memory::Reports::JemallocStats do
+ let(:reports_dir) { '/empty-dir' }
+ let(:jemalloc_stats) { described_class.new(reports_path: reports_dir) }
+
+ describe '.run' do
+ context 'when :report_jemalloc_stats ops FF is enabled' do
+ let(:worker_id) { 'puma_1' }
+ let(:report_name) { 'report.json' }
+ let(:report_path) { File.join(reports_dir, report_name) }
+
+ before do
+ allow(Prometheus::PidProvider).to receive(:worker_id).and_return(worker_id)
+ end
+
+ it 'invokes Jemalloc.dump_stats and returns file path' do
+ expect(Gitlab::Memory::Jemalloc)
+ .to receive(:dump_stats).with(path: reports_dir, filename_label: worker_id).and_return(report_path)
+
+ expect(jemalloc_stats.run).to eq(report_path)
+ end
+
+ describe 'reports cleanup' do
+ let_it_be(:outdir) { Dir.mktmpdir }
+
+ let(:jemalloc_stats) { described_class.new(reports_path: outdir) }
+
+ before do
+ stub_env('GITLAB_DIAGNOSTIC_REPORTS_JEMALLOC_MAX_REPORTS_STORED', 3)
+ allow(Gitlab::Memory::Jemalloc).to receive(:dump_stats)
+ end
+
+ after do
+ FileUtils.rm_rf(outdir)
+ end
+
+ context 'when number of reports exceeds `max_reports_stored`' do
+ let_it_be(:reports) do
+ now = Time.current
+
+ (1..5).map do |i|
+ Tempfile.new("jemalloc_stats.#{i}.worker_#{i}.#{Time.current.to_i}.json", outdir).tap do |f|
+ FileUtils.touch(f, mtime: (now + i.second).to_i)
+ end
+ end
+ end
+
+ after do
+ reports.each do |f|
+ f.close
+ f.unlink
+ rescue Errno::ENOENT
+ # Some of the files are already unlinked by the code under test; ignore
+ end
+ end
+
+ it 'keeps only `max_reports_stored` total newest files' do
+ expect { jemalloc_stats.run }
+ .to change { Dir.entries(outdir).count { |e| e.match(/jemalloc_stats.*/) } }
+ .from(5).to(3)
+
+ # Keeps only the newest reports
+ expect(reports.last(3).all? { |r| File.exist?(r) }).to be true
+ end
+ end
+
+ context 'when number of reports does not exceed `max_reports_stored`' do
+ let_it_be(:reports) do
+ now = Time.current
+
+ (1..3).map do |i|
+ Tempfile.new("jemalloc_stats.#{i}.worker_#{i}.#{Time.current.to_i}.json", outdir).tap do |f|
+ FileUtils.touch(f, mtime: (now + i.second).to_i)
+ end
+ end
+ end
+
+ after do
+ reports.each do |f|
+ f.close
+ f.unlink
+ end
+ end
+
+ it 'does not remove any reports' do
+ expect { jemalloc_stats.run }
+ .not_to change { Dir.entries(outdir).count { |e| e.match(/jemalloc_stats.*/) } }
+ end
+ end
+ end
+ end
+
+ context 'when :report_jemalloc_stats ops FF is disabled' do
+ before do
+ stub_feature_flags(report_jemalloc_stats: false)
+ end
+
+ it 'does not run the report and returns nil' do
+ expect(Gitlab::Memory::Jemalloc).not_to receive(:dump_stats)
+
+ expect(jemalloc_stats.run).to be_nil
+ end
+ end
+ end
+
+ describe '.active?' do
+ subject(:active) { jemalloc_stats.active? }
+
+ context 'when :report_jemalloc_stats ops FF is enabled' do
+ it { is_expected.to be true }
+ end
+
+ context 'when :report_jemalloc_stats ops FF is disabled' do
+ before do
+ stub_feature_flags(report_jemalloc_stats: false)
+ end
+
+ it { is_expected.to be false }
+ end
+ end
+end
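The cleanup examples above boil down to a retention rule: keep only the GITLAB_DIAGNOSTIC_REPORTS_JEMALLOC_MAX_REPORTS_STORED newest report files and unlink the rest, oldest first. A hedged sketch of that rule (not the shipped implementation):

  def cleanup_jemalloc_reports(dir, max_reports_stored)
    # Sort by modification time so the oldest reports are deleted first.
    reports = Dir.glob(File.join(dir, 'jemalloc_stats*')).sort_by { |f| File.mtime(f) }
    excess = reports.size - max_reports_stored
    reports.first(excess).each { |f| File.unlink(f) } if excess.positive?
  end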
diff --git a/spec/lib/gitlab/memory/reports_daemon_spec.rb b/spec/lib/gitlab/memory/reports_daemon_spec.rb
new file mode 100644
index 00000000000..c9562470971
--- /dev/null
+++ b/spec/lib/gitlab/memory/reports_daemon_spec.rb
@@ -0,0 +1,136 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Memory::ReportsDaemon do
+ let(:daemon) { described_class.new }
+
+ describe '#run_thread' do
+ let(:report_duration_counter) { instance_double(::Prometheus::Client::Counter) }
+ let(:file_size) { 1_000_000 }
+
+ before do
+ allow(Gitlab::Metrics).to receive(:counter).and_return(report_duration_counter)
+ allow(report_duration_counter).to receive(:increment)
+
+ # make sleep a no-op
+ allow(daemon).to receive(:sleep) {}
+
+ # let alive return 3 times: true, true, false
+ allow(daemon).to receive(:alive).and_return(true, true, false)
+
+ allow(File).to receive(:size).with(/#{daemon.reports_path}.*\.json/).and_return(file_size)
+ end
+
+ it 'runs reports' do
+ expect(daemon.send(:reports)).to all(receive(:run).twice.and_call_original)
+
+ daemon.send(:run_thread)
+ end
+
+ it 'logs report execution' do
+ expect(::Prometheus::PidProvider).to receive(:worker_id).at_least(:once).and_return('worker_1')
+
+ expect(Gitlab::AppLogger).to receive(:info).with(
+ hash_including(
+ :duration_s,
+ :cpu_s,
+ perf_report_size_bytes: file_size,
+ message: 'finished',
+ pid: Process.pid,
+ worker_id: 'worker_1',
+ perf_report: 'jemalloc_stats'
+ )).twice
+
+ daemon.send(:run_thread)
+ end
+
+ context 'when the report object returns invalid file path' do
+ before do
+ allow(File).to receive(:size).with(/#{daemon.reports_path}.*\.json/).and_raise(Errno::ENOENT)
+ end
+
+ it 'logs `0` as `perf_report_size_bytes`' do
+ expect(Gitlab::AppLogger).to receive(:info).with(hash_including(perf_report_size_bytes: 0)).twice
+
+ daemon.send(:run_thread)
+ end
+ end
+
+ it 'sets real time duration gauge' do
+ expect(report_duration_counter).to receive(:increment).with({ report: 'jemalloc_stats' }, an_instance_of(Float))
+
+ daemon.send(:run_thread)
+ end
+
+ it 'allows configuring and running multiple reports' do
+ # rubocop: disable RSpec/VerifiedDoubles
+ # We test how ReportsDaemon could be extended in the future.
+ # We configure it with new report classes that are not yet defined, so we cannot use instance_double here.
+ active_report_1 = double("Active Report 1", active?: true)
+ active_report_2 = double("Active Report 2", active?: true)
+ inactive_report = double("Inactive Report", active?: false)
+ # rubocop: enable RSpec/VerifiedDoubles
+
+ allow(daemon).to receive(:reports).and_return([active_report_1, inactive_report, active_report_2])
+
+ expect(active_report_1).to receive(:run).and_return('/tmp/report_1.json').twice
+ expect(active_report_2).to receive(:run).and_return('/tmp/report_2.json').twice
+ expect(inactive_report).not_to receive(:run)
+
+ daemon.send(:run_thread)
+ end
+
+ context 'sleep timers logic' do
+ it 'wakes up every (fixed interval + defined delta) and sleeps between reports in each cycle' do
+ stub_env('GITLAB_DIAGNOSTIC_REPORTS_SLEEP_MAX_DELTA_S', 1) # rand(1) == 0, so we will have fixed sleep interval
+ daemon = described_class.new
+ allow(daemon).to receive(:alive).and_return(true, true, false)
+
+ expect(daemon).to receive(:sleep).with(described_class::DEFAULT_SLEEP_S).ordered
+ expect(daemon).to receive(:sleep).with(described_class::DEFAULT_SLEEP_BETWEEN_REPORTS_S).ordered
+ expect(daemon).to receive(:sleep).with(described_class::DEFAULT_SLEEP_S).ordered
+ expect(daemon).to receive(:sleep).with(described_class::DEFAULT_SLEEP_BETWEEN_REPORTS_S).ordered
+
+ daemon.send(:run_thread)
+ end
+ end
+ end
+
+ describe '#stop_working' do
+ it 'changes :alive to false' do
+ expect { daemon.send(:stop_working) }.to change { daemon.send(:alive) }.from(true).to(false)
+ end
+ end
+
+ context 'timer intervals settings' do
+ context 'when no settings are set in the environment' do
+ it 'uses defaults' do
+ daemon = described_class.new
+
+ expect(daemon.sleep_s).to eq(described_class::DEFAULT_SLEEP_S)
+ expect(daemon.sleep_max_delta_s).to eq(described_class::DEFAULT_SLEEP_MAX_DELTA_S)
+ expect(daemon.sleep_between_reports_s).to eq(described_class::DEFAULT_SLEEP_BETWEEN_REPORTS_S)
+ expect(daemon.reports_path).to eq(described_class::DEFAULT_REPORTS_PATH)
+ end
+ end
+
+ context 'when settings are passed through the environment' do
+ before do
+ stub_env('GITLAB_DIAGNOSTIC_REPORTS_SLEEP_S', 100)
+ stub_env('GITLAB_DIAGNOSTIC_REPORTS_SLEEP_MAX_DELTA_S', 50)
+ stub_env('GITLAB_DIAGNOSTIC_REPORTS_SLEEP_BETWEEN_REPORTS_S', 2)
+ stub_env('GITLAB_DIAGNOSTIC_REPORTS_PATH', '/empty-dir')
+ end
+
+ it 'uses provided values' do
+ daemon = described_class.new
+
+ expect(daemon.sleep_s).to eq(100)
+ expect(daemon.sleep_max_delta_s).to eq(50)
+ expect(daemon.sleep_between_reports_s).to eq(2)
+ expect(daemon.reports_path).to eq('/empty-dir')
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/memory/watchdog_spec.rb b/spec/lib/gitlab/memory/watchdog_spec.rb
index 8b82078bcb9..010f6884df3 100644
--- a/spec/lib/gitlab/memory/watchdog_spec.rb
+++ b/spec/lib/gitlab/memory/watchdog_spec.rb
@@ -14,32 +14,57 @@ RSpec.describe Gitlab::Memory::Watchdog, :aggregate_failures, :prometheus do
let(:sleep_time) { 0.1 }
let(:max_heap_fragmentation) { 0.2 }
+ # Tests should set this to control the number of loop iterations in `call`.
+ let(:watchdog_iterations) { 1 }
+
subject(:watchdog) do
described_class.new(handler: handler, logger: logger, sleep_time_seconds: sleep_time,
- max_strikes: max_strikes, max_heap_fragmentation: max_heap_fragmentation)
+ max_strikes: max_strikes, max_heap_fragmentation: max_heap_fragmentation).tap do |instance|
+ # We need to defuse `sleep` and stop the internal loop after N iterations.
+ iterations = 0
+ expect(instance).to receive(:sleep) do
+ instance.stop if (iterations += 1) >= watchdog_iterations
+ end.at_most(watchdog_iterations)
+ end
+ end
+
+ def stub_prometheus_metrics
+ allow(Gitlab::Metrics).to receive(:gauge)
+ .with(:gitlab_memwd_heap_frag_limit, anything)
+ .and_return(heap_frag_limit_gauge)
+ allow(Gitlab::Metrics).to receive(:counter)
+ .with(:gitlab_memwd_heap_frag_violations_total, anything, anything)
+ .and_return(heap_frag_violations_counter)
+ allow(Gitlab::Metrics).to receive(:counter)
+ .with(:gitlab_memwd_heap_frag_violations_handled_total, anything, anything)
+ .and_return(heap_frag_violations_handled_counter)
+
+ allow(heap_frag_limit_gauge).to receive(:set)
+ allow(heap_frag_violations_counter).to receive(:increment)
+ allow(heap_frag_violations_handled_counter).to receive(:increment)
end
before do
+ stub_prometheus_metrics
+
allow(handler).to receive(:on_high_heap_fragmentation).and_return(true)
allow(logger).to receive(:warn)
allow(logger).to receive(:info)
allow(Gitlab::Metrics::Memory).to receive(:gc_heap_fragmentation).and_return(fragmentation)
- end
- after do
- watchdog.stop
+ allow(::Prometheus::PidProvider).to receive(:worker_id).and_return('worker_1')
end
- context 'when starting up' do
+ context 'when created' do
let(:fragmentation) { 0 }
let(:max_strikes) { 0 }
it 'sets the heap fragmentation limit gauge' do
- allow(Gitlab::Metrics).to receive(:gauge).and_return(heap_frag_limit_gauge)
-
expect(heap_frag_limit_gauge).to receive(:set).with({}, max_heap_fragmentation)
+
+ watchdog
end
context 'when no settings are set in the environment' do
@@ -76,77 +101,54 @@ RSpec.describe Gitlab::Memory::Watchdog, :aggregate_failures, :prometheus do
it 'does not signal the handler' do
expect(handler).not_to receive(:on_high_heap_fragmentation)
- watchdog.start
-
- sleep sleep_time * 3
+ watchdog.call
end
end
context 'when process exceeds heap fragmentation threshold permanently' do
let(:fragmentation) { max_heap_fragmentation + 0.1 }
-
- before do
- allow(Gitlab::Metrics).to receive(:counter)
- .with(:gitlab_memwd_heap_frag_violations_total, anything, anything)
- .and_return(heap_frag_violations_counter)
- allow(Gitlab::Metrics).to receive(:counter)
- .with(:gitlab_memwd_heap_frag_violations_handled_total, anything, anything)
- .and_return(heap_frag_violations_handled_counter)
- allow(heap_frag_violations_counter).to receive(:increment)
- allow(heap_frag_violations_handled_counter).to receive(:increment)
- end
+ let(:max_strikes) { 3 }
context 'when process has not exceeded allowed number of strikes' do
- let(:max_strikes) { 10 }
+ let(:watchdog_iterations) { max_strikes }
it 'does not signal the handler' do
expect(handler).not_to receive(:on_high_heap_fragmentation)
- watchdog.start
-
- sleep sleep_time * 3
+ watchdog.call
end
it 'does not log any events' do
expect(logger).not_to receive(:warn)
- watchdog.start
-
- sleep sleep_time * 3
+ watchdog.call
end
it 'increments the violations counter' do
- expect(heap_frag_violations_counter).to receive(:increment)
-
- watchdog.start
+ expect(heap_frag_violations_counter).to receive(:increment).exactly(watchdog_iterations)
- sleep sleep_time * 3
+ watchdog.call
end
it 'does not increment violations handled counter' do
expect(heap_frag_violations_handled_counter).not_to receive(:increment)
- watchdog.start
-
- sleep sleep_time * 3
+ watchdog.call
end
end
context 'when process exceeds the allowed number of strikes' do
- let(:max_strikes) { 1 }
+ let(:watchdog_iterations) { max_strikes + 1 }
it 'signals the handler and resets strike counter' do
expect(handler).to receive(:on_high_heap_fragmentation).and_return(true)
- watchdog.start
-
- sleep sleep_time * 3
+ watchdog.call
expect(watchdog.strikes).to eq(0)
end
it 'logs the event' do
- expect(::Prometheus::PidProvider).to receive(:worker_id).at_least(:once).and_return('worker_1')
expect(Gitlab::Metrics::System).to receive(:memory_usage_rss).at_least(:once).and_return(1024)
expect(logger).to receive(:warn).with({
message: 'heap fragmentation limit exceeded',
@@ -161,18 +163,14 @@ RSpec.describe Gitlab::Memory::Watchdog, :aggregate_failures, :prometheus do
memwd_rss_bytes: 1024
})
- watchdog.start
-
- sleep sleep_time * 3
+ watchdog.call
end
it 'increments both the violations and violations handled counters' do
- expect(heap_frag_violations_counter).to receive(:increment)
+ expect(heap_frag_violations_counter).to receive(:increment).exactly(watchdog_iterations)
expect(heap_frag_violations_handled_counter).to receive(:increment)
- watchdog.start
-
- sleep sleep_time * 3
+ watchdog.call
end
context 'when enforce_memory_watchdog ops toggle is off' do
@@ -186,35 +184,31 @@ RSpec.describe Gitlab::Memory::Watchdog, :aggregate_failures, :prometheus do
receive(:on_high_heap_fragmentation).with(fragmentation).and_return(true)
)
- watchdog.start
-
- sleep sleep_time * 3
+ watchdog.call
end
end
- end
-
- context 'when handler result is true' do
- let(:max_strikes) { 1 }
- it 'considers the event handled and stops itself' do
- expect(handler).to receive(:on_high_heap_fragmentation).once.and_return(true)
+ context 'when handler result is true' do
+ it 'considers the event handled and stops itself' do
+ expect(handler).to receive(:on_high_heap_fragmentation).once.and_return(true)
+ expect(logger).to receive(:info).with(hash_including(message: 'stopped'))
- watchdog.start
-
- sleep sleep_time * 3
+ watchdog.call
+ end
end
- end
-
- context 'when handler result is false' do
- let(:max_strikes) { 1 }
- it 'keeps running' do
- # Return true the third time to terminate the daemon.
- expect(handler).to receive(:on_high_heap_fragmentation).and_return(false, false, true)
+ context 'when handler result is false' do
+ let(:max_strikes) { 0 } # to make sure the handler fires each iteration
+ let(:watchdog_iterations) { 3 }
- watchdog.start
+ it 'keeps running' do
+ expect(heap_frag_violations_counter).to receive(:increment).exactly(watchdog_iterations)
+ expect(heap_frag_violations_handled_counter).to receive(:increment).exactly(watchdog_iterations)
+ # Return true the third time to terminate the daemon.
+ expect(handler).to receive(:on_high_heap_fragmentation).and_return(false, false, true)
- sleep sleep_time * 4
+ watchdog.call
+ end
end
end
end
@@ -222,6 +216,7 @@ RSpec.describe Gitlab::Memory::Watchdog, :aggregate_failures, :prometheus do
context 'when process exceeds heap fragmentation threshold temporarily' do
let(:fragmentation) { max_heap_fragmentation }
let(:max_strikes) { 1 }
+ let(:watchdog_iterations) { 4 }
before do
allow(Gitlab::Metrics::Memory).to receive(:gc_heap_fragmentation).and_return(
@@ -235,9 +230,7 @@ RSpec.describe Gitlab::Memory::Watchdog, :aggregate_failures, :prometheus do
it 'does not signal the handler' do
expect(handler).not_to receive(:on_high_heap_fragmentation)
- watchdog.start
-
- sleep sleep_time * 4
+ watchdog.call
end
end
@@ -252,9 +245,7 @@ RSpec.describe Gitlab::Memory::Watchdog, :aggregate_failures, :prometheus do
it 'does not monitor heap fragmentation' do
expect(Gitlab::Metrics::Memory).not_to receive(:gc_heap_fragmentation)
- watchdog.start
-
- sleep sleep_time * 3
+ watchdog.call
end
end
end
diff --git a/spec/lib/gitlab/metrics/background_transaction_spec.rb b/spec/lib/gitlab/metrics/background_transaction_spec.rb
index 83bee84df99..2e48070cb4f 100644
--- a/spec/lib/gitlab/metrics/background_transaction_spec.rb
+++ b/spec/lib/gitlab/metrics/background_transaction_spec.rb
@@ -23,7 +23,7 @@ RSpec.describe Gitlab::Metrics::BackgroundTransaction do
end
it 'removes the transaction from the current thread upon completion' do
- transaction.run { }
+ transaction.run {}
expect(Thread.current[described_class::THREAD_KEY]).to be_nil
end
diff --git a/spec/lib/gitlab/metrics/web_transaction_spec.rb b/spec/lib/gitlab/metrics/web_transaction_spec.rb
index 06ce58a9e84..d6590efcf4f 100644
--- a/spec/lib/gitlab/metrics/web_transaction_spec.rb
+++ b/spec/lib/gitlab/metrics/web_transaction_spec.rb
@@ -28,7 +28,7 @@ RSpec.describe Gitlab::Metrics::WebTransaction do
end
it 'removes the transaction from the current thread upon completion' do
- transaction.run { }
+ transaction.run {}
expect(Thread.current[described_class::THREAD_KEY]).to be_nil
expect(described_class.current).to be_nil
diff --git a/spec/lib/gitlab/middleware/compressed_json_spec.rb b/spec/lib/gitlab/middleware/compressed_json_spec.rb
index a07cd49c572..6d49ab58d5d 100644
--- a/spec/lib/gitlab/middleware/compressed_json_spec.rb
+++ b/spec/lib/gitlab/middleware/compressed_json_spec.rb
@@ -33,7 +33,7 @@ RSpec.describe Gitlab::Middleware::CompressedJson do
describe '#call' do
context 'with collector route' do
- let(:path) { '/api/v4/error_tracking/collector/1/store'}
+ let(:path) { '/api/v4/error_tracking/collector/1/store' }
it_behaves_like 'decompress middleware'
@@ -45,7 +45,7 @@ RSpec.describe Gitlab::Middleware::CompressedJson do
end
context 'with collector route under relative url' do
- let(:path) { '/gitlab/api/v4/error_tracking/collector/1/store'}
+ let(:path) { '/gitlab/api/v4/error_tracking/collector/1/store' }
before do
stub_config_setting(relative_url_root: '/gitlab')
@@ -71,7 +71,7 @@ RSpec.describe Gitlab::Middleware::CompressedJson do
let(:body_limit) { Gitlab::Middleware::CompressedJson::MAXIMUM_BODY_SIZE }
let(:decompressed_input) { 'a' * (body_limit + 100) }
let(:input) { ActiveSupport::Gzip.compress(decompressed_input) }
- let(:path) { '/api/v4/error_tracking/collector/1/envelope'}
+ let(:path) { '/api/v4/error_tracking/collector/1/envelope' }
it 'reads only limited size' do
expect(middleware.call(env))
diff --git a/spec/lib/gitlab/middleware/sidekiq_web_static_spec.rb b/spec/lib/gitlab/middleware/sidekiq_web_static_spec.rb
index e6815a46a56..91c030a0f45 100644
--- a/spec/lib/gitlab/middleware/sidekiq_web_static_spec.rb
+++ b/spec/lib/gitlab/middleware/sidekiq_web_static_spec.rb
@@ -14,7 +14,7 @@ RSpec.describe Gitlab::Middleware::SidekiqWebStatic do
end
context 'with an /admin/sidekiq route' do
- let(:path) { '/admin/sidekiq/javascripts/application.js'}
+ let(:path) { '/admin/sidekiq/javascripts/application.js' }
it 'deletes the HTTP_X_SENDFILE_TYPE header' do
expect(app).to receive(:call)
diff --git a/spec/lib/gitlab/octokit/middleware_spec.rb b/spec/lib/gitlab/octokit/middleware_spec.rb
index bc4d95738c7..92e424978ff 100644
--- a/spec/lib/gitlab/octokit/middleware_spec.rb
+++ b/spec/lib/gitlab/octokit/middleware_spec.rb
@@ -27,7 +27,7 @@ RSpec.describe Gitlab::Octokit::Middleware do
it_behaves_like 'Public URL'
end
- context 'when the URL is a localhost adresss' do
+ context 'when the URL is a localhost address' do
let(:env) { { url: 'http://127.0.0.1' } }
context 'when localhost requests are not allowed' do
diff --git a/spec/lib/gitlab/otp_key_rotator_spec.rb b/spec/lib/gitlab/otp_key_rotator_spec.rb
index e328b190db4..e3b9f006b19 100644
--- a/spec/lib/gitlab/otp_key_rotator_spec.rb
+++ b/spec/lib/gitlab/otp_key_rotator_spec.rb
@@ -42,7 +42,7 @@ RSpec.describe Gitlab::OtpKeyRotator do
it 'stores the calculated values in a spreadsheet' do
rotation
- expect(data).to match_array(users.map {|u| build_row(u) })
+ expect(data).to match_array(users.map { |u| build_row(u) })
end
context 'new key is too short' do
diff --git a/spec/lib/gitlab/pagination/gitaly_keyset_pager_spec.rb b/spec/lib/gitlab/pagination/gitaly_keyset_pager_spec.rb
index dcb8138bdde..0bafd436bd0 100644
--- a/spec/lib/gitlab/pagination/gitaly_keyset_pager_spec.rb
+++ b/spec/lib/gitlab/pagination/gitaly_keyset_pager_spec.rb
@@ -126,5 +126,19 @@ RSpec.describe Gitlab::Pagination::GitalyKeysetPager do
end
end
end
+
+ context 'with "none" pagination option' do
+ let(:expected_result) { double(:result) }
+ let(:query) { { pagination: 'none' } }
+
+ it 'uses offset pagination' do
+ expect(finder).to receive(:execute).with(gitaly_pagination: false).and_return(expected_result)
+ expect(Kaminari).not_to receive(:paginate_array)
+ expect(Gitlab::Pagination::OffsetPagination).not_to receive(:new)
+
+ actual_result = pager.paginate(finder)
+ expect(actual_result).to eq(expected_result)
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/pagination/keyset_spec.rb b/spec/lib/gitlab/pagination/keyset_spec.rb
index 81dc40b35d5..8885e684d8a 100644
--- a/spec/lib/gitlab/pagination/keyset_spec.rb
+++ b/spec/lib/gitlab/pagination/keyset_spec.rb
@@ -18,7 +18,7 @@ RSpec.describe Gitlab::Pagination::Keyset do
describe '.available?' do
subject { described_class }
- let(:request_context) { double("request context", page: page)}
+ let(:request_context) { double("request context", page: page) }
let(:page) { double("page", order_by: order_by) }
shared_examples_for 'keyset pagination is available' do
diff --git a/spec/lib/gitlab/phabricator_import/conduit/response_spec.rb b/spec/lib/gitlab/phabricator_import/conduit/response_spec.rb
index c368b349a3c..a444e7fdf47 100644
--- a/spec/lib/gitlab/phabricator_import/conduit/response_spec.rb
+++ b/spec/lib/gitlab/phabricator_import/conduit/response_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
RSpec.describe Gitlab::PhabricatorImport::Conduit::Response do
- let(:response) { described_class.new(Gitlab::Json.parse(fixture_file('phabricator_responses/maniphest.search.json')))}
+ let(:response) { described_class.new(Gitlab::Json.parse(fixture_file('phabricator_responses/maniphest.search.json'))) }
let(:error_response) { described_class.new(Gitlab::Json.parse(fixture_file('phabricator_responses/auth_failed.json'))) }
describe '.parse!' do
diff --git a/spec/lib/gitlab/prometheus_client_spec.rb b/spec/lib/gitlab/prometheus_client_spec.rb
index 89ddde4a01d..9083c5625d4 100644
--- a/spec/lib/gitlab/prometheus_client_spec.rb
+++ b/spec/lib/gitlab/prometheus_client_spec.rb
@@ -104,7 +104,7 @@ RSpec.describe Gitlab::PrometheusClient do
end
describe 'failure to reach a provided prometheus url' do
- let(:prometheus_url) {"https://prometheus.invalid.example.com/api/v1/query?query=1"}
+ let(:prometheus_url) { "https://prometheus.invalid.example.com/api/v1/query?query=1" }
shared_examples 'exceptions are raised' do
Gitlab::HTTP::HTTP_ERRORS.each do |error|
diff --git a/spec/lib/gitlab/quick_actions/extractor_spec.rb b/spec/lib/gitlab/quick_actions/extractor_spec.rb
index c040a70e403..e2f289041ce 100644
--- a/spec/lib/gitlab/quick_actions/extractor_spec.rb
+++ b/spec/lib/gitlab/quick_actions/extractor_spec.rb
@@ -7,10 +7,10 @@ RSpec.describe Gitlab::QuickActions::Extractor do
Class.new do
include Gitlab::QuickActions::Dsl
- command(:reopen, :open) { }
- command(:assign) { }
- command(:labels) { }
- command(:power) { }
+ command(:reopen, :open) {}
+ command(:assign) {}
+ command(:labels) {}
+ command(:power) {}
command(:noop_command)
substitution(:substitution) { 'foo' }
substitution :shrug do |comment|
diff --git a/spec/lib/gitlab/rack_attack/instrumented_cache_store_spec.rb b/spec/lib/gitlab/rack_attack/instrumented_cache_store_spec.rb
index bd167ee2e3e..8151519ddec 100644
--- a/spec/lib/gitlab/rack_attack/instrumented_cache_store_spec.rb
+++ b/spec/lib/gitlab/rack_attack/instrumented_cache_store_spec.rb
@@ -7,7 +7,7 @@ RSpec.describe Gitlab::RackAttack::InstrumentedCacheStore do
let(:store) { ::ActiveSupport::Cache::NullStore.new }
- subject { described_class.new(upstream_store: store)}
+ subject { described_class.new(upstream_store: store) }
where(:operation, :params, :test_proc) do
:fetch | [:key] | ->(s) { s.fetch(:key) }
diff --git a/spec/lib/gitlab/rack_attack/user_allowlist_spec.rb b/spec/lib/gitlab/rack_attack/user_allowlist_spec.rb
index aa604dfab71..1b6fa584e3e 100644
--- a/spec/lib/gitlab/rack_attack/user_allowlist_spec.rb
+++ b/spec/lib/gitlab/rack_attack/user_allowlist_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe Gitlab::RackAttack::UserAllowlist do
using RSpec::Parameterized::TableSyntax
- subject { described_class.new(input)}
+ subject { described_class.new(input) }
where(:input, :elements) do
nil | []
diff --git a/spec/lib/gitlab/redis/cache_spec.rb b/spec/lib/gitlab/redis/cache_spec.rb
index 31141ac1139..1f0ebbe107f 100644
--- a/spec/lib/gitlab/redis/cache_spec.rb
+++ b/spec/lib/gitlab/redis/cache_spec.rb
@@ -15,4 +15,16 @@ RSpec.describe Gitlab::Redis::Cache do
expect(subject.send(:raw_config_hash)).to eq(url: 'redis://localhost:6380' )
end
end
+
+ describe '.active_support_config' do
+ it 'has a default ttl of 2 weeks' do
+ expect(described_class.active_support_config[:expires_in]).to eq(2.weeks)
+ end
+
+ it 'allows configuring the TTL through an env variable' do
+ stub_env('GITLAB_RAILS_CACHE_DEFAULT_TTL_SECONDS' => '86400')
+
+ expect(described_class.active_support_config[:expires_in]).to eq(1.day)
+ end
+ end
end
diff --git a/spec/lib/gitlab/redis/hll_spec.rb b/spec/lib/gitlab/redis/hll_spec.rb
index e452e5b2f52..9cd339239bb 100644
--- a/spec/lib/gitlab/redis/hll_spec.rb
+++ b/spec/lib/gitlab/redis/hll_spec.rb
@@ -64,10 +64,10 @@ RSpec.describe Gitlab::Redis::HLL, :clean_gitlab_redis_shared_state do
let(:event_2020_33) { '2020-33-{expand_vulnerabilities}' }
let(:event_2020_34) { '2020-34-{expand_vulnerabilities}' }
- let(:entity1) { 'user_id_1'}
- let(:entity2) { 'user_id_2'}
- let(:entity3) { 'user_id_3'}
- let(:entity4) { 'user_id_4'}
+ let(:entity1) { 'user_id_1' }
+ let(:entity2) { 'user_id_2' }
+ let(:entity3) { 'user_id_3' }
+ let(:entity4) { 'user_id_4' }
before do
track_event(event_2020_32, entity1)
diff --git a/spec/lib/gitlab/redis/multi_store_spec.rb b/spec/lib/gitlab/redis/multi_store_spec.rb
index 50ebf43a05e..ef8549548d7 100644
--- a/spec/lib/gitlab/redis/multi_store_spec.rb
+++ b/spec/lib/gitlab/redis/multi_store_spec.rb
@@ -23,7 +23,7 @@ RSpec.describe Gitlab::Redis::MultiStore do
let_it_be(:primary_store) { create_redis_store(redis_store_class.params, db: primary_db, serializer: nil) }
let_it_be(:secondary_store) { create_redis_store(redis_store_class.params, db: secondary_db, serializer: nil) }
let_it_be(:instance_name) { 'TestStore' }
- let_it_be(:multi_store) { described_class.new(primary_store, secondary_store, instance_name)}
+ let_it_be(:multi_store) { described_class.new(primary_store, secondary_store, instance_name) }
subject { multi_store.send(name, *args) }
@@ -38,7 +38,7 @@ RSpec.describe Gitlab::Redis::MultiStore do
end
context 'when primary_store is nil' do
- let(:multi_store) { described_class.new(nil, secondary_store, instance_name)}
+ let(:multi_store) { described_class.new(nil, secondary_store, instance_name) }
it 'fails with exception' do
expect { multi_store }.to raise_error(ArgumentError, /primary_store is required/)
@@ -46,7 +46,7 @@ RSpec.describe Gitlab::Redis::MultiStore do
end
context 'when secondary_store is nil' do
- let(:multi_store) { described_class.new(primary_store, nil, instance_name)}
+ let(:multi_store) { described_class.new(primary_store, nil, instance_name) }
it 'fails with exception' do
expect { multi_store }.to raise_error(ArgumentError, /secondary_store is required/)
@@ -55,7 +55,7 @@ RSpec.describe Gitlab::Redis::MultiStore do
context 'when instance_name is nil' do
let(:instance_name) { nil }
- let(:multi_store) { described_class.new(primary_store, secondary_store, instance_name)}
+ let(:multi_store) { described_class.new(primary_store, secondary_store, instance_name) }
it 'fails with exception' do
expect { multi_store }.to raise_error(ArgumentError, /instance_name is required/)
@@ -111,8 +111,8 @@ RSpec.describe Gitlab::Redis::MultiStore do
context 'with READ redis commands' do
let_it_be(:key1) { "redis:{1}:key_a" }
let_it_be(:key2) { "redis:{1}:key_b" }
- let_it_be(:value1) { "redis_value1"}
- let_it_be(:value2) { "redis_value2"}
+ let_it_be(:value1) { "redis_value1" }
+ let_it_be(:value2) { "redis_value2" }
let_it_be(:skey) { "redis:set:key" }
let_it_be(:keys) { [key1, key2] }
let_it_be(:values) { [value1, value2] }
@@ -330,7 +330,7 @@ RSpec.describe Gitlab::Redis::MultiStore do
context 'with both primary and secondary store using same redis instance' do
let(:primary_store) { create_redis_store(redis_store_class.params, db: primary_db, serializer: nil) }
let(:secondary_store) { create_redis_store(redis_store_class.params, db: primary_db, serializer: nil) }
- let(:multi_store) { described_class.new(primary_store, secondary_store, instance_name)}
+ let(:multi_store) { described_class.new(primary_store, secondary_store, instance_name) }
it_behaves_like 'secondary store'
end
@@ -356,8 +356,8 @@ RSpec.describe Gitlab::Redis::MultiStore do
context 'with WRITE redis commands' do
let_it_be(:key1) { "redis:{1}:key_a" }
let_it_be(:key2) { "redis:{1}:key_b" }
- let_it_be(:value1) { "redis_value1"}
- let_it_be(:value2) { "redis_value2"}
+ let_it_be(:value1) { "redis_value1" }
+ let_it_be(:value2) { "redis_value2" }
let_it_be(:key1_value1) { [key1, value1] }
let_it_be(:key1_value2) { [key1, value2] }
let_it_be(:ttl) { 10 }
@@ -395,7 +395,7 @@ RSpec.describe Gitlab::Redis::MultiStore do
with_them do
describe "#{name}" do
- let(:expected_args) {args || no_args }
+ let(:expected_args) { args || no_args }
before do
allow(primary_store).to receive(name).and_call_original
@@ -496,8 +496,8 @@ RSpec.describe Gitlab::Redis::MultiStore do
RSpec.shared_examples_for 'pipelined command' do |name|
let_it_be(:key1) { "redis:{1}:key_a" }
- let_it_be(:value1) { "redis_value1"}
- let_it_be(:value2) { "redis_value2"}
+ let_it_be(:value1) { "redis_value1" }
+ let_it_be(:value2) { "redis_value2" }
let_it_be(:expected_value) { value1 }
let_it_be(:verification_name) { :get }
let_it_be(:verification_args) { key1 }
diff --git a/spec/lib/gitlab/reference_counter_spec.rb b/spec/lib/gitlab/reference_counter_spec.rb
index 83e4006c69b..05294fb84e7 100644
--- a/spec/lib/gitlab/reference_counter_spec.rb
+++ b/spec/lib/gitlab/reference_counter_spec.rb
@@ -41,7 +41,7 @@ RSpec.describe Gitlab::ReferenceCounter, :clean_gitlab_redis_shared_state do
it 'resets reference count down to zero' do
3.times { reference_counter.increase }
- expect { reference_counter.reset! }.to change { reference_counter.value}.from(3).to(0)
+ expect { reference_counter.reset! }.to change { reference_counter.value }.from(3).to(0)
end
end
diff --git a/spec/lib/gitlab/regex_spec.rb b/spec/lib/gitlab/regex_spec.rb
index a3afbed18e2..d8f182d903d 100644
--- a/spec/lib/gitlab/regex_spec.rb
+++ b/spec/lib/gitlab/regex_spec.rb
@@ -270,7 +270,7 @@ RSpec.describe Gitlab::Regex do
context 'conan recipe components' do
shared_examples 'accepting valid recipe components values' do
- let(:fifty_one_characters) { 'f_a' * 17}
+ let(:fifty_one_characters) { 'f_a' * 17 }
it { is_expected.to match('foobar') }
it { is_expected.to match('foo_bar') }
@@ -374,12 +374,12 @@ RSpec.describe Gitlab::Regex do
end
end
- it { is_expected.to match('0')}
+ it { is_expected.to match('0') }
it { is_expected.to match('1') }
it { is_expected.to match('03') }
it { is_expected.to match('2.0') }
it { is_expected.to match('01.2') }
- it { is_expected.to match('10.2.3-beta')}
+ it { is_expected.to match('10.2.3-beta') }
it { is_expected.to match('1.2-SNAPSHOT') }
it { is_expected.to match('20') }
it { is_expected.to match('20.3') }
@@ -454,7 +454,7 @@ RSpec.describe Gitlab::Regex do
it { is_expected.to match('0.1') }
it { is_expected.to match('2.0') }
- it { is_expected.to match('1.2.0')}
+ it { is_expected.to match('1.2.0') }
it { is_expected.to match('0100!0.0') }
it { is_expected.to match('00!1.2') }
it { is_expected.to match('1.0a') }
diff --git a/spec/lib/gitlab/search/abuse_detection_spec.rb b/spec/lib/gitlab/search/abuse_detection_spec.rb
index a18d28456cd..2a8d74a62ab 100644
--- a/spec/lib/gitlab/search/abuse_detection_spec.rb
+++ b/spec/lib/gitlab/search/abuse_detection_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe Gitlab::Search::AbuseDetection do
subject { described_class.new(params) }
- let(:params) {{ query_string: 'foobar' }}
+ let(:params) { { query_string: 'foobar' } }
describe 'abusive scopes validation' do
it 'allows only approved scopes' do
diff --git a/spec/lib/gitlab/search_context/builder_spec.rb b/spec/lib/gitlab/search_context/builder_spec.rb
index a09115f3f21..78799b67a69 100644
--- a/spec/lib/gitlab/search_context/builder_spec.rb
+++ b/spec/lib/gitlab/search_context/builder_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe Gitlab::SearchContext::Builder, type: :controller do
- controller(ApplicationController) { }
+ controller(ApplicationController) {}
subject(:builder) { described_class.new(controller.view_context) }
diff --git a/spec/lib/gitlab/seeder_spec.rb b/spec/lib/gitlab/seeder_spec.rb
index a94ae2bca7a..0ad80323085 100644
--- a/spec/lib/gitlab/seeder_spec.rb
+++ b/spec/lib/gitlab/seeder_spec.rb
@@ -77,4 +77,44 @@ RSpec.describe Gitlab::Seeder do
end
end
end
+
+ describe ::Gitlab::Seeder::Ci::DailyBuildGroupReportResult do
+ let_it_be(:group) { create(:group) }
+ let_it_be(:project) { create(:project, :repository, group: group) }
+ let_it_be(:pipeline) { create(:ci_pipeline, project: project) }
+ let_it_be(:build) { create(:ci_build, :success, pipeline: pipeline) }
+
+ subject(:build_report) do
+ described_class.new(project)
+ end
+
+ describe '#seed' do
+ it 'creates daily build results for the project' do
+ expect { build_report.seed }.to change {
+ Ci::DailyBuildGroupReportResult.count
+ }.by(Gitlab::Seeder::Ci::DailyBuildGroupReportResult::COUNT_OF_DAYS)
+ end
+
+ it 'matches project data with last report' do
+ build_report.seed
+
+ report = project.daily_build_group_report_results.last
+ reports_count = project.daily_build_group_report_results.count
+
+ expect(build.group_name).to eq(report.group_name)
+ expect(pipeline.source_ref_path).to eq(report.ref_path)
+ expect(pipeline.default_branch?).to eq(report.default_branch)
+ expect(reports_count).to eq(Gitlab::Seeder::Ci::DailyBuildGroupReportResult::COUNT_OF_DAYS)
+ end
+
+ it 'does not raise error on RecordNotUnique' do
+ build_report.seed
+ build_report.seed
+
+ reports_count = project.daily_build_group_report_results.count
+
+ expect(reports_count).to eq(Gitlab::Seeder::Ci::DailyBuildGroupReportResult::COUNT_OF_DAYS)
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/session_spec.rb b/spec/lib/gitlab/session_spec.rb
index de680e8425e..67ad59f956d 100644
--- a/spec/lib/gitlab/session_spec.rb
+++ b/spec/lib/gitlab/session_spec.rb
@@ -19,7 +19,7 @@ RSpec.describe Gitlab::Session do
end
it 'restores current store after' do
- described_class.with_session(two: 2) { }
+ described_class.with_session(two: 2) {}
expect(described_class.current).to eq nil
end
diff --git a/spec/lib/gitlab/sidekiq_config_spec.rb b/spec/lib/gitlab/sidekiq_config_spec.rb
index 4a1a9beb21a..c62302d8bba 100644
--- a/spec/lib/gitlab/sidekiq_config_spec.rb
+++ b/spec/lib/gitlab/sidekiq_config_spec.rb
@@ -194,7 +194,7 @@ RSpec.describe Gitlab::SidekiqConfig do
queues = described_class.routing_queues
expect(queues).to match_array(%w[
- default mailers high_urgency gitaly email_receiver service_desk_email_receiver
+ default mailers high_urgency gitaly
])
expect(queues).not_to include('not_exist')
end
diff --git a/spec/lib/gitlab/sidekiq_daemon/memory_killer_spec.rb b/spec/lib/gitlab/sidekiq_daemon/memory_killer_spec.rb
index 01b7270d761..635f572daef 100644
--- a/spec/lib/gitlab/sidekiq_daemon/memory_killer_spec.rb
+++ b/spec/lib/gitlab/sidekiq_daemon/memory_killer_spec.rb
@@ -106,7 +106,7 @@ RSpec.describe Gitlab::SidekiqDaemon::MemoryKiller do
end
describe '#stop_working' do
- subject { memory_killer.send(:stop_working)}
+ subject { memory_killer.send(:stop_working) }
it 'changes enable? to false' do
expect { subject }.to change { memory_killer.send(:enabled?) }
@@ -355,6 +355,7 @@ RSpec.describe Gitlab::SidekiqDaemon::MemoryKiller do
let(:reason) { 'rss out of range reason description' }
let(:queue) { 'default' }
let(:running_jobs) { [{ jid: jid, worker_class: 'DummyWorker' }] }
+ let(:metrics) { memory_killer.instance_variable_get(:@metrics) }
let(:worker) do
Class.new do
def self.name
@@ -390,6 +391,9 @@ RSpec.describe Gitlab::SidekiqDaemon::MemoryKiller do
reason: reason,
running_jobs: running_jobs)
+ expect(metrics[:sidekiq_memory_killer_running_jobs]).to receive(:increment)
+ .with({ worker_class: "DummyWorker", deadline_exceeded: true })
+
Gitlab::SidekiqDaemon::Monitor.instance.within_job(DummyWorker, jid, queue) do
subject
end
diff --git a/spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb b/spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb
index 9c0cbe21e6b..e3d9549a3c0 100644
--- a/spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb
+++ b/spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb
@@ -24,7 +24,7 @@ RSpec.describe Gitlab::SidekiqLogging::StructuredLogger do
expect(subject).to receive(:log_job_start).and_call_original
expect(subject).to receive(:log_job_done).and_call_original
- call_subject(job, 'test_queue') { }
+ call_subject(job, 'test_queue') {}
end
end
@@ -40,7 +40,7 @@ RSpec.describe Gitlab::SidekiqLogging::StructuredLogger do
expect(subject).to receive(:log_job_start).and_call_original
expect(subject).to receive(:log_job_done).and_call_original
- call_subject(wrapped_job, 'test_queue') { }
+ call_subject(wrapped_job, 'test_queue') {}
end
end
@@ -175,7 +175,7 @@ RSpec.describe Gitlab::SidekiqLogging::StructuredLogger do
expect(subject).to receive(:log_job_start).and_call_original
expect(subject).to receive(:log_job_done).and_call_original
- call_subject(job, 'test_queue') { }
+ call_subject(job, 'test_queue') {}
end
end
@@ -188,7 +188,7 @@ RSpec.describe Gitlab::SidekiqLogging::StructuredLogger do
expect(subject).to receive(:log_job_start).and_call_original
expect(subject).to receive(:log_job_done).and_call_original
- call_subject(job.except("created_at", "enqueued_at"), 'test_queue') { }
+ call_subject(job.except("created_at", "enqueued_at"), 'test_queue') {}
end
end
end
@@ -204,7 +204,7 @@ RSpec.describe Gitlab::SidekiqLogging::StructuredLogger do
expect(subject).to receive(:log_job_start).and_call_original
expect(subject).to receive(:log_job_done).and_call_original
- call_subject(job, 'test_queue') { }
+ call_subject(job, 'test_queue') {}
end
end
end
@@ -233,7 +233,7 @@ RSpec.describe Gitlab::SidekiqLogging::StructuredLogger do
expect(subject).to receive(:log_job_start).and_call_original
expect(subject).to receive(:log_job_done).and_call_original
- call_subject(job, 'test_queue') { }
+ call_subject(job, 'test_queue') {}
end
end
end
@@ -266,7 +266,7 @@ RSpec.describe Gitlab::SidekiqLogging::StructuredLogger do
expect(logger).to receive(:info).with(start_payload).ordered
expect(logger).to receive(:info).with(expected_end_payload).ordered
- call_subject(job, 'test_queue') { }
+ call_subject(job, 'test_queue') {}
end
end
end
@@ -330,7 +330,7 @@ RSpec.describe Gitlab::SidekiqLogging::StructuredLogger do
Gitlab::SafeRequestStore.clear!
- call_subject(job.dup, 'test_queue') { }
+ call_subject(job.dup, 'test_queue') {}
end
end
diff --git a/spec/lib/gitlab/sidekiq_middleware/monitor_spec.rb b/spec/lib/gitlab/sidekiq_middleware/monitor_spec.rb
index 85cddfa7bf1..d61c9765753 100644
--- a/spec/lib/gitlab/sidekiq_middleware/monitor_spec.rb
+++ b/spec/lib/gitlab/sidekiq_middleware/monitor_spec.rb
@@ -41,7 +41,9 @@ RSpec.describe Gitlab::SidekiqMiddleware::Monitor do
::Sidekiq::DeadSet.new.clear
expect do
- subject rescue Sidekiq::JobRetry::Skip
+ subject
+ rescue Sidekiq::JobRetry::Skip
+ nil
end.to change { ::Sidekiq::DeadSet.new.size }.by(1)
end
end
diff --git a/spec/lib/gitlab/sidekiq_middleware/server_metrics_spec.rb b/spec/lib/gitlab/sidekiq_middleware/server_metrics_spec.rb
index 117b37ffda3..d6d24ea3a24 100644
--- a/spec/lib/gitlab/sidekiq_middleware/server_metrics_spec.rb
+++ b/spec/lib/gitlab/sidekiq_middleware/server_metrics_spec.rb
@@ -109,6 +109,7 @@ RSpec.describe Gitlab::SidekiqMiddleware::ServerMetrics do
expect(elasticsearch_seconds_metric).to receive(:observe).with(labels_with_job_status, elasticsearch_duration)
expect(redis_requests_total).to receive(:increment).with(labels_with_job_status, redis_calls)
expect(elasticsearch_requests_total).to receive(:increment).with(labels_with_job_status, elasticsearch_calls)
+ expect(sidekiq_mem_total_bytes).to receive(:set).with(labels_with_job_status, mem_total_bytes)
subject.call(worker, job, :test) { nil }
end
diff --git a/spec/lib/gitlab/slash_commands/deploy_spec.rb b/spec/lib/gitlab/slash_commands/deploy_spec.rb
index 5167523ff58..5af234ff88e 100644
--- a/spec/lib/gitlab/slash_commands/deploy_spec.rb
+++ b/spec/lib/gitlab/slash_commands/deploy_spec.rb
@@ -165,7 +165,7 @@ RSpec.describe Gitlab::SlashCommands::Deploy do
context 'with ReDoS attempts' do
def duration_for(&block)
start = Time.zone.now
- yield if block_given?
+ yield if block
Time.zone.now - start
end
diff --git a/spec/lib/gitlab/spamcheck/client_spec.rb b/spec/lib/gitlab/spamcheck/client_spec.rb
index a6e7665569c..956ed2a976f 100644
--- a/spec/lib/gitlab/spamcheck/client_spec.rb
+++ b/spec/lib/gitlab/spamcheck/client_spec.rb
@@ -36,7 +36,7 @@ RSpec.describe Gitlab::Spamcheck::Client do
let(:stub) { double(:spamcheck_stub, check_for_spam_issue: response) }
context 'is tls ' do
- let(:endpoint) { 'tls://spamcheck.example.com'}
+ let(:endpoint) { 'tls://spamcheck.example.com' }
it 'uses secure connection' do
expect(Spamcheck::SpamcheckService::Stub).to receive(:new).with(endpoint.sub(%r{^tls://}, ''),
@@ -97,7 +97,7 @@ RSpec.describe Gitlab::Spamcheck::Client do
context: cxt)
expect(issue_pb.title).to eq issue.title
expect(issue_pb.description).to eq issue.description
- expect(issue_pb.user_in_project). to be false
+ expect(issue_pb.user_in_project).to be false
expect(issue_pb.project.project_id).to eq issue.project_id
expect(issue_pb.created_at).to eq timestamp_to_protobuf_timestamp(issue.created_at)
expect(issue_pb.updated_at).to eq timestamp_to_protobuf_timestamp(issue.updated_at)
@@ -118,7 +118,7 @@ RSpec.describe Gitlab::Spamcheck::Client do
end
context 'when user has multiple email addresses' do
- let(:secondary_email) {create(:email, :confirmed, user: user)}
+ let(:secondary_email) { create(:email, :confirmed, user: user) }
before do
user.emails << secondary_email
diff --git a/spec/lib/gitlab/ssh/commit_spec.rb b/spec/lib/gitlab/ssh/commit_spec.rb
new file mode 100644
index 00000000000..cc977a80f95
--- /dev/null
+++ b/spec/lib/gitlab/ssh/commit_spec.rb
@@ -0,0 +1,82 @@
+# frozen_string_literal: true
+require 'spec_helper'
+
+RSpec.describe Gitlab::Ssh::Commit do
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:signed_by_key) { create(:key) }
+
+ let(:commit) { create(:commit, project: project) }
+ let(:signature_text) { 'signature_text' }
+ let(:signed_text) { 'signed_text' }
+ let(:signature_data) { [signature_text, signed_text] }
+ let(:verifier) { instance_double('Gitlab::Ssh::Signature') }
+ let(:verification_status) { :verified }
+
+ subject(:signature) { described_class.new(commit).signature }
+
+ before do
+ allow(Gitlab::Git::Commit).to receive(:extract_signature_lazily)
+ .with(Gitlab::Git::Repository, commit.sha)
+ .and_return(signature_data)
+
+ allow(verifier).to receive(:verification_status).and_return(verification_status)
+ allow(verifier).to receive(:signed_by_key).and_return(signed_by_key)
+
+ allow(Gitlab::Ssh::Signature).to receive(:new)
+ .with(signature_text, signed_text, commit.committer_email)
+ .and_return(verifier)
+ end
+
+ describe '#signature' do
+ it 'returns the cached signature on multiple calls' do
+ ssh_commit = described_class.new(commit)
+
+ expect(ssh_commit).to receive(:create_cached_signature!).and_call_original
+ ssh_commit.signature
+
+ expect(ssh_commit).not_to receive(:create_cached_signature!)
+ ssh_commit.signature
+ end
+
+ context 'when all expected data is present' do
+ it 'calls signature verifier and uses returned attributes' do
+ expect(signature).to have_attributes(
+ commit_sha: commit.sha,
+ project: project,
+ key_id: signed_by_key.id,
+ verification_status: 'verified'
+ )
+ end
+ end
+
+ context 'when signed_by_key is nil' do
+ let_it_be(:signed_by_key) { nil }
+
+ let(:verification_status) { :unknown_key }
+
+ it 'creates signature without a key_id' do
+ expect(signature).to have_attributes(
+ commit_sha: commit.sha,
+ project: project,
+ key_id: nil,
+ verification_status: 'unknown_key'
+ )
+ end
+ end
+ end
+
+ describe '#update_signature!' do
+ it 'updates verification status' do
+ allow(verifier).to receive(:verification_status).and_return(:unverified)
+ signature
+
+ stored_signature = CommitSignatures::SshSignature.find_by_commit_sha(commit.sha)
+
+ allow(verifier).to receive(:verification_status).and_return(:verified)
+
+ expect { described_class.new(commit).update_signature!(stored_signature) }.to(
+ change { signature.reload.verification_status }.from('unverified').to('verified')
+ )
+ end
+ end
+end
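A hedged usage sketch of the API this new spec exercises; `commit` is assumed to be a Commit whose SSH signature data is available, and the attribute list mirrors the expectations above rather than the implementation itself.

# Minimal sketch, not the implementation: read a commit's cached SSH signature.
signature = Gitlab::Ssh::Commit.new(commit).signature

signature.verification_status # e.g. 'verified', 'unverified' or 'unknown_key'
signature.key_id              # id of the matching Key record, or nil when the key is unknown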
diff --git a/spec/lib/gitlab/suggestions/file_suggestion_spec.rb b/spec/lib/gitlab/suggestions/file_suggestion_spec.rb
index 1d25bf6edbd..5971f4ebbce 100644
--- a/spec/lib/gitlab/suggestions/file_suggestion_spec.rb
+++ b/spec/lib/gitlab/suggestions/file_suggestion_spec.rb
@@ -25,7 +25,7 @@ RSpec.describe Gitlab::Suggestions::FileSuggestion do
let_it_be(:user) { create(:user) }
- let_it_be(:file_path) { 'files/ruby/popen.rb'}
+ let_it_be(:file_path) { 'files/ruby/popen.rb' }
let_it_be(:project) { create(:project, :repository) }
diff --git a/spec/lib/gitlab/tracking/destinations/snowplow_micro_spec.rb b/spec/lib/gitlab/tracking/destinations/snowplow_micro_spec.rb
index 2554a15d97e..48092a33da3 100644
--- a/spec/lib/gitlab/tracking/destinations/snowplow_micro_spec.rb
+++ b/spec/lib/gitlab/tracking/destinations/snowplow_micro_spec.rb
@@ -48,40 +48,8 @@ RSpec.describe Gitlab::Tracking::Destinations::SnowplowMicro do
allow(Gitlab.config).to receive(:snowplow_micro).and_raise(Settingslogic::MissingSetting)
end
- context 'when SNOWPLOW_MICRO_URI has scheme and port' do
- before do
- stub_env('SNOWPLOW_MICRO_URI', 'http://gdk.test:9091')
- end
-
- it 'returns hostname URI part' do
- expect(subject.hostname).to eq('gdk.test:9091')
- end
- end
-
- context 'when SNOWPLOW_MICRO_URI is without protocol' do
- before do
- stub_env('SNOWPLOW_MICRO_URI', 'gdk.test:9091')
- end
-
- it 'returns hostname URI part' do
- expect(subject.hostname).to eq('gdk.test:9091')
- end
- end
-
- context 'when SNOWPLOW_MICRO_URI is hostname only' do
- before do
- stub_env('SNOWPLOW_MICRO_URI', 'uriwithoutport')
- end
-
- it 'returns hostname URI with default HTTP port' do
- expect(subject.hostname).to eq('uriwithoutport:80')
- end
- end
-
- context 'when SNOWPLOW_MICRO_URI is not set' do
- it 'returns localhost hostname' do
- expect(subject.hostname).to eq('localhost:9090')
- end
+ it 'returns localhost hostname' do
+ expect(subject.hostname).to eq('localhost:9090')
end
end
end
diff --git a/spec/lib/gitlab/tracking_spec.rb b/spec/lib/gitlab/tracking_spec.rb
index dd62c832f6f..028c985f3b3 100644
--- a/spec/lib/gitlab/tracking_spec.rb
+++ b/spec/lib/gitlab/tracking_spec.rb
@@ -90,15 +90,6 @@ RSpec.describe Gitlab::Tracking do
it_behaves_like 'delegates to SnowplowMicro destination with proper options'
end
-
- context "enabled with env variable" do
- before do
- allow(Gitlab.config).to receive(:snowplow_micro).and_raise(Settingslogic::MissingSetting)
- stub_env('SNOWPLOW_MICRO_ENABLE', '1')
- end
-
- it_behaves_like 'delegates to SnowplowMicro destination with proper options'
- end
end
it 'when feature flag is disabled' do
@@ -149,7 +140,6 @@ RSpec.describe Gitlab::Tracking do
context 'when destination is Snowplow' do
before do
- stub_env('SNOWPLOW_MICRO_ENABLE', '0')
allow(Rails.env).to receive(:development?).and_return(true)
end
@@ -158,7 +148,6 @@ RSpec.describe Gitlab::Tracking do
context 'when destination is SnowplowMicro' do
before do
- stub_env('SNOWPLOW_MICRO_ENABLE', '1')
allow(Rails.env).to receive(:development?).and_return(true)
end
@@ -181,7 +170,7 @@ RSpec.describe Gitlab::Tracking do
let_it_be(:definition_action) { 'definition_action' }
let_it_be(:definition_category) { 'definition_category' }
let_it_be(:label_description) { 'definition label description' }
- let_it_be(:test_definition) {{ 'category': definition_category, 'action': definition_action }}
+ let_it_be(:test_definition) { { 'category': definition_category, 'action': definition_action } }
before do
allow_next_instance_of(described_class) do |instance|
@@ -212,4 +201,28 @@ RSpec.describe Gitlab::Tracking do
project: project, user: user, namespace: namespace, extra_key_1: 'extra value 1')
end
end
+
+ describe 'snowplow_micro_enabled?' do
+ before do
+ allow(Rails.env).to receive(:development?).and_return(true)
+ end
+
+ it 'returns true when snowplow_micro is enabled' do
+ stub_config(snowplow_micro: { enabled: true })
+
+ expect(described_class).to be_snowplow_micro_enabled
+ end
+
+ it 'returns false when snowplow_micro is disabled' do
+ stub_config(snowplow_micro: { enabled: false })
+
+ expect(described_class).not_to be_snowplow_micro_enabled
+ end
+
+ it 'returns false when snowplow_micro is not configured' do
+ allow(Gitlab.config).to receive(:snowplow_micro).and_raise(Settingslogic::MissingSetting)
+
+ expect(described_class).not_to be_snowplow_micro_enabled
+ end
+ end
end
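With the env-variable toggle removed, the examples above drive Snowplow Micro purely from the Rails env and the snowplow_micro config section. A minimal sketch of the predicate being tested, assuming the config shapes stubbed above (not the exact implementation):

def snowplow_micro_enabled?
  # Development-only, and false when the snowplow_micro section is absent.
  Rails.env.development? && Gitlab.config.snowplow_micro.enabled
rescue Settingslogic::MissingSetting
  false
end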
diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/database_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/database_metric_spec.rb
index 8e7bd7b84e6..f73155642d6 100644
--- a/spec/lib/gitlab/usage/metrics/instrumentations/database_metric_spec.rb
+++ b/spec/lib/gitlab/usage/metrics/instrumentations/database_metric_spec.rb
@@ -160,6 +160,38 @@ RSpec.describe Gitlab::Usage::Metrics::Instrumentations::DatabaseMetric do
end
end
end
+
+ context 'with custom timestamp column' do
+ subject do
+ described_class.tap do |metric_class|
+ metric_class.relation { Issue }
+ metric_class.operation :count
+ metric_class.timestamp_column :last_edited_at
+ end.new(time_frame: '28d')
+ end
+
+ it 'calculates a correct result' do
+ create(:issue, last_edited_at: 5.days.ago)
+
+ expect(subject.value).to eq(1)
+ end
+ end
+
+ context 'with default timestamp column' do
+ subject do
+ described_class.tap do |metric_class|
+ metric_class.relation { Issue }
+ metric_class.operation :count
+ end.new(time_frame: '28d')
+ end
+
+ it 'calculates a correct result' do
+ create(:issue, last_edited_at: 5.days.ago)
+ create(:issue, created_at: 5.days.ago)
+
+ expect(subject.value).to eq(1)
+ end
+ end
end
context 'with unimplemented operation method used' do
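The new contexts above exercise an optional timestamp_column call in the DatabaseMetric DSL. A hedged sketch of how a concrete metric class might use it; the class name is hypothetical, and only the relation/operation/timestamp_column calls come from the spec:

class CountRecentlyEditedIssuesMetric < Gitlab::Usage::Metrics::Instrumentations::DatabaseMetric
  relation { Issue }
  operation :count
  timestamp_column :last_edited_at # when omitted, the metric falls back to the default created_at column
end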
diff --git a/spec/lib/gitlab/usage/metrics/name_suggestion_spec.rb b/spec/lib/gitlab/usage/metrics/name_suggestion_spec.rb
index 9ee8bc6b568..f9cd6e88e0a 100644
--- a/spec/lib/gitlab/usage/metrics/name_suggestion_spec.rb
+++ b/spec/lib/gitlab/usage/metrics/name_suggestion_spec.rb
@@ -66,7 +66,7 @@ RSpec.describe Gitlab::Usage::Metrics::NameSuggestion do
let(:key_path) { 'counts.jira_imports_total_imported_issues_count' }
let(:operation) { :sum }
let(:relation) { JiraImportState.finished }
- let(:column) { :imported_issues_count}
+ let(:column) { :imported_issues_count }
let(:name_suggestion) { /sum_imported_issues_count_from_<adjective describing\: '\(jira_imports\.status = \d+\)'>_jira_imports/ }
end
end
@@ -77,7 +77,7 @@ RSpec.describe Gitlab::Usage::Metrics::NameSuggestion do
let(:key_path) { 'counts.ci_pipeline_duration' }
let(:operation) { :average }
let(:relation) { Ci::Pipeline }
- let(:column) { :duration}
+ let(:column) { :duration }
let(:name_suggestion) { /average_duration_from_ci_pipelines/ }
end
end
diff --git a/spec/lib/gitlab/usage/metrics/names_suggestions/generator_spec.rb b/spec/lib/gitlab/usage/metrics/names_suggestions/generator_spec.rb
index 167dba9b57d..7e8b15d23db 100644
--- a/spec/lib/gitlab/usage/metrics/names_suggestions/generator_spec.rb
+++ b/spec/lib/gitlab/usage/metrics/names_suggestions/generator_spec.rb
@@ -17,7 +17,7 @@ RSpec.describe Gitlab::Usage::Metrics::NamesSuggestions::Generator do
end
describe '#add_metric' do
- let(:metric) {'CountIssuesMetric' }
+ let(:metric) { 'CountIssuesMetric' }
it 'computes the suggested name for given metric' do
expect(described_class.add_metric(metric)).to eq('count_issues')
diff --git a/spec/lib/gitlab/usage/service_ping_report_spec.rb b/spec/lib/gitlab/usage/service_ping_report_spec.rb
index 1e8f9db4dea..7a37a31b195 100644
--- a/spec/lib/gitlab/usage/service_ping_report_spec.rb
+++ b/spec/lib/gitlab/usage/service_ping_report_spec.rb
@@ -111,8 +111,12 @@ RSpec.describe Gitlab::Usage::ServicePingReport, :use_clean_rails_memory_store_c
# Because test cases are run inside a transaction, if any query raises an error all queries that follow
# it are automatically canceled by PostgreSQL. To avoid that problem, and to provide exhaustive information
# about every metric, queries are wrapped explicitly in sub-transactions.
- ApplicationRecord.transaction do
- ApplicationRecord.connection.execute(query)&.first&.values&.first
+ table = PgQuery.parse(query).tables.first
+ gitlab_schema = Gitlab::Database::GitlabSchema.tables_to_schema[table]
+ base_model = gitlab_schema == :gitlab_main ? ApplicationRecord : Ci::ApplicationRecord
+
+ base_model.transaction do
+ base_model.connection.execute(query)&.first&.values&.first
end
rescue ActiveRecord::StatementInvalid => e
e.message
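The replacement above stops forcing every query through ApplicationRecord and instead picks the base model that owns the queried table. A compact restatement of that routing, using only the calls visible in the hunk:

def base_model_for(query)
  table = PgQuery.parse(query).tables.first
  schema = Gitlab::Database::GitlabSchema.tables_to_schema[table]

  # gitlab_main tables use the main connection; everything else uses the CI one.
  schema == :gitlab_main ? ApplicationRecord : Ci::ApplicationRecord
end

# base_model_for('SELECT COUNT(*) FROM ci_builds') would return Ci::ApplicationRecord,
# assuming ci_builds maps to a non-main schema in tables_to_schema.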
diff --git a/spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb b/spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb
index 54d49b432f4..e0b334cb5af 100644
--- a/spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb
+++ b/spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb
@@ -77,32 +77,18 @@ RSpec.describe Gitlab::UsageDataCounters::HLLRedisCounter, :clean_gitlab_redis_s
end
describe '.unique_events_data' do
- context 'with use_redis_hll_instrumentation_classes feature enabled' do
- it 'does not include instrumented categories' do
- stub_feature_flags(use_redis_hll_instrumentation_classes: true)
-
- expect(described_class.unique_events_data.keys)
- .not_to include(*described_class::CATEGORIES_COLLECTED_FROM_METRICS_DEFINITIONS)
- end
- end
-
- context 'with use_redis_hll_instrumentation_classes feature disabled' do
- it 'includes instrumented categories' do
- stub_feature_flags(use_redis_hll_instrumentation_classes: false)
-
- expect(described_class.unique_events_data.keys)
- .to include(*described_class::CATEGORIES_COLLECTED_FROM_METRICS_DEFINITIONS)
- end
+ it 'does not include instrumented categories' do
+ expect(described_class.unique_events_data.keys)
+ .not_to include(*described_class.categories_collected_from_metrics_definitions)
end
end
end
describe '.categories' do
- it 'gets all unique category names' do
- expect(described_class.categories).to contain_exactly(
+ it 'gets CE unique category names' do
+ expect(described_class.categories).to include(
'deploy_token_packages',
'user_packages',
- 'compliance',
'ecosystem',
'analytics',
'ide_edit',
@@ -130,7 +116,8 @@ RSpec.describe Gitlab::UsageDataCounters::HLLRedisCounter, :clean_gitlab_redis_s
'work_items',
'ci_users',
'error_tracking',
- 'manage'
+ 'manage',
+ 'kubernetes_agent'
)
end
end
@@ -483,7 +470,7 @@ RSpec.describe Gitlab::UsageDataCounters::HLLRedisCounter, :clean_gitlab_redis_s
describe '.weekly_redis_keys' do
using RSpec::Parameterized::TableSyntax
- let(:weekly_event) { 'g_compliance_dashboard' }
+ let(:weekly_event) { 'i_search_total' }
let(:redis_event) { described_class.send(:event_for, weekly_event) }
subject(:weekly_redis_keys) { described_class.send(:weekly_redis_keys, events: [redis_event], start_date: DateTime.parse(start_date), end_date: DateTime.parse(end_date)) }
@@ -493,13 +480,13 @@ RSpec.describe Gitlab::UsageDataCounters::HLLRedisCounter, :clean_gitlab_redis_s
'2020-12-21' | '2020-12-20' | []
'2020-12-21' | '2020-11-21' | []
'2021-01-01' | '2020-12-28' | []
- '2020-12-21' | '2020-12-28' | ['g_{compliance}_dashboard-2020-52']
- '2020-12-21' | '2021-01-01' | ['g_{compliance}_dashboard-2020-52']
- '2020-12-27' | '2021-01-01' | ['g_{compliance}_dashboard-2020-52']
- '2020-12-26' | '2021-01-04' | ['g_{compliance}_dashboard-2020-52', 'g_{compliance}_dashboard-2020-53']
- '2020-12-26' | '2021-01-11' | ['g_{compliance}_dashboard-2020-52', 'g_{compliance}_dashboard-2020-53', 'g_{compliance}_dashboard-2021-01']
- '2020-12-26' | '2021-01-17' | ['g_{compliance}_dashboard-2020-52', 'g_{compliance}_dashboard-2020-53', 'g_{compliance}_dashboard-2021-01']
- '2020-12-26' | '2021-01-18' | ['g_{compliance}_dashboard-2020-52', 'g_{compliance}_dashboard-2020-53', 'g_{compliance}_dashboard-2021-01', 'g_{compliance}_dashboard-2021-02']
+ '2020-12-21' | '2020-12-28' | ['i_{search}_total-2020-52']
+ '2020-12-21' | '2021-01-01' | ['i_{search}_total-2020-52']
+ '2020-12-27' | '2021-01-01' | ['i_{search}_total-2020-52']
+ '2020-12-26' | '2021-01-04' | ['i_{search}_total-2020-52', 'i_{search}_total-2020-53']
+ '2020-12-26' | '2021-01-11' | ['i_{search}_total-2020-52', 'i_{search}_total-2020-53', 'i_{search}_total-2021-01']
+ '2020-12-26' | '2021-01-17' | ['i_{search}_total-2020-52', 'i_{search}_total-2020-53', 'i_{search}_total-2021-01']
+ '2020-12-26' | '2021-01-18' | ['i_{search}_total-2020-52', 'i_{search}_total-2020-53', 'i_{search}_total-2021-01', 'i_{search}_total-2021-02']
end
with_them do
diff --git a/spec/lib/gitlab/usage_data_counters/ipynb_diff_activity_counter_spec.rb b/spec/lib/gitlab/usage_data_counters/ipynb_diff_activity_counter_spec.rb
index 60c4424d2ae..b778f532a11 100644
--- a/spec/lib/gitlab/usage_data_counters/ipynb_diff_activity_counter_spec.rb
+++ b/spec/lib/gitlab/usage_data_counters/ipynb_diff_activity_counter_spec.rb
@@ -43,18 +43,18 @@ RSpec.describe Gitlab::UsageDataCounters::IpynbDiffActivityCounter, :clean_gitla
let(:for_commit) { true }
it_behaves_like 'an action that tracks events' do
- let(:action) {described_class::NOTE_CREATED_IN_IPYNB_DIFF_ACTION}
- let(:per_user_action) {described_class::USER_CREATED_NOTE_IN_IPYNB_DIFF_ACTION}
+ let(:action) { described_class::NOTE_CREATED_IN_IPYNB_DIFF_ACTION }
+ let(:per_user_action) { described_class::USER_CREATED_NOTE_IN_IPYNB_DIFF_ACTION }
end
it_behaves_like 'an action that tracks events' do
- let(:action) {described_class::NOTE_CREATED_IN_IPYNB_DIFF_COMMIT_ACTION}
- let(:per_user_action) {described_class::USER_CREATED_NOTE_IN_IPYNB_DIFF_COMMIT_ACTION}
+ let(:action) { described_class::NOTE_CREATED_IN_IPYNB_DIFF_COMMIT_ACTION }
+ let(:per_user_action) { described_class::USER_CREATED_NOTE_IN_IPYNB_DIFF_COMMIT_ACTION }
end
it_behaves_like 'an action that does not track events' do
- let(:action) {described_class::NOTE_CREATED_IN_IPYNB_DIFF_MR_ACTION}
- let(:per_user_action) {described_class::USER_CREATED_NOTE_IN_IPYNB_DIFF_MR_ACTION}
+ let(:action) { described_class::NOTE_CREATED_IN_IPYNB_DIFF_MR_ACTION }
+ let(:per_user_action) { described_class::USER_CREATED_NOTE_IN_IPYNB_DIFF_MR_ACTION }
end
end
@@ -62,35 +62,35 @@ RSpec.describe Gitlab::UsageDataCounters::IpynbDiffActivityCounter, :clean_gitla
let(:for_mr) { true }
it_behaves_like 'an action that tracks events' do
- let(:action) {described_class::NOTE_CREATED_IN_IPYNB_DIFF_MR_ACTION}
- let(:per_user_action) {described_class::USER_CREATED_NOTE_IN_IPYNB_DIFF_MR_ACTION}
+ let(:action) { described_class::NOTE_CREATED_IN_IPYNB_DIFF_MR_ACTION }
+ let(:per_user_action) { described_class::USER_CREATED_NOTE_IN_IPYNB_DIFF_MR_ACTION }
end
it_behaves_like 'an action that tracks events' do
- let(:action) {described_class::NOTE_CREATED_IN_IPYNB_DIFF_ACTION}
- let(:per_user_action) {described_class::USER_CREATED_NOTE_IN_IPYNB_DIFF_ACTION}
+ let(:action) { described_class::NOTE_CREATED_IN_IPYNB_DIFF_ACTION }
+ let(:per_user_action) { described_class::USER_CREATED_NOTE_IN_IPYNB_DIFF_ACTION }
end
it_behaves_like 'an action that does not track events' do
- let(:action) {described_class::NOTE_CREATED_IN_IPYNB_DIFF_COMMIT_ACTION}
- let(:per_user_action) {described_class::USER_CREATED_NOTE_IN_IPYNB_DIFF_COMMIT_ACTION}
+ let(:action) { described_class::NOTE_CREATED_IN_IPYNB_DIFF_COMMIT_ACTION }
+ let(:per_user_action) { described_class::USER_CREATED_NOTE_IN_IPYNB_DIFF_COMMIT_ACTION }
end
end
context 'note is for neither MR nor Commit' do
it_behaves_like 'an action that does not track events' do
- let(:action) {described_class::NOTE_CREATED_IN_IPYNB_DIFF_ACTION}
- let(:per_user_action) {described_class::USER_CREATED_NOTE_IN_IPYNB_DIFF_ACTION}
+ let(:action) { described_class::NOTE_CREATED_IN_IPYNB_DIFF_ACTION }
+ let(:per_user_action) { described_class::USER_CREATED_NOTE_IN_IPYNB_DIFF_ACTION }
end
it_behaves_like 'an action that does not track events' do
- let(:action) {described_class::NOTE_CREATED_IN_IPYNB_DIFF_MR_ACTION}
- let(:per_user_action) {described_class::USER_CREATED_NOTE_IN_IPYNB_DIFF_MR_ACTION}
+ let(:action) { described_class::NOTE_CREATED_IN_IPYNB_DIFF_MR_ACTION }
+ let(:per_user_action) { described_class::USER_CREATED_NOTE_IN_IPYNB_DIFF_MR_ACTION }
end
it_behaves_like 'an action that does not track events' do
- let(:action) {described_class::NOTE_CREATED_IN_IPYNB_DIFF_COMMIT_ACTION}
- let(:per_user_action) {described_class::USER_CREATED_NOTE_IN_IPYNB_DIFF_COMMIT_ACTION}
+ let(:action) { described_class::NOTE_CREATED_IN_IPYNB_DIFF_COMMIT_ACTION }
+ let(:per_user_action) { described_class::USER_CREATED_NOTE_IN_IPYNB_DIFF_COMMIT_ACTION }
end
end
end
diff --git a/spec/lib/gitlab/usage_data_counters/issue_activity_unique_counter_spec.rb b/spec/lib/gitlab/usage_data_counters/issue_activity_unique_counter_spec.rb
index 1b73e5269d7..84a6f338282 100644
--- a/spec/lib/gitlab/usage_data_counters/issue_activity_unique_counter_spec.rb
+++ b/spec/lib/gitlab/usage_data_counters/issue_activity_unique_counter_spec.rb
@@ -6,7 +6,12 @@ RSpec.describe Gitlab::UsageDataCounters::IssueActivityUniqueCounter, :clean_git
let_it_be(:user1) { build(:user, id: 1) }
let_it_be(:user2) { build(:user, id: 2) }
let_it_be(:user3) { build(:user, id: 3) }
+ let_it_be(:project) { build(:project) }
+ let_it_be(:category) { Gitlab::UsageDataCounters::IssueActivityUniqueCounter::ISSUE_CATEGORY }
+ let_it_be(:event_action) { Gitlab::UsageDataCounters::IssueActivityUniqueCounter::ISSUE_ACTION }
+ let_it_be(:event_label) { Gitlab::UsageDataCounters::IssueActivityUniqueCounter::ISSUE_LABEL }
+ let(:event_property) { action }
let(:time) { Time.zone.now }
context 'for Issue title edit actions' do
@@ -120,8 +125,8 @@ RSpec.describe Gitlab::UsageDataCounters::IssueActivityUniqueCounter, :clean_git
end
context 'for Issue cloned actions' do
- it_behaves_like 'a daily tracked issuable event' do
- let(:action) { described_class::ISSUE_CLONED }
+ it_behaves_like 'daily tracked issuable snowplow and service ping events with project' do
+ let_it_be(:action) { described_class::ISSUE_CLONED }
def track_action(params)
described_class.track_issue_cloned_action(**params)
@@ -239,8 +244,8 @@ RSpec.describe Gitlab::UsageDataCounters::IssueActivityUniqueCounter, :clean_git
end
end
- context 'for Issue comment added actions' do
- it_behaves_like 'a daily tracked issuable event' do
+ context 'for Issue comment added actions', :snowplow do
+ it_behaves_like 'daily tracked issuable snowplow and service ping events with project' do
let(:action) { described_class::ISSUE_COMMENT_ADDED }
def track_action(params)
@@ -249,8 +254,8 @@ RSpec.describe Gitlab::UsageDataCounters::IssueActivityUniqueCounter, :clean_git
end
end
- context 'for Issue comment edited actions' do
- it_behaves_like 'a daily tracked issuable event' do
+ context 'for Issue comment edited actions', :snowplow do
+ it_behaves_like 'daily tracked issuable snowplow and service ping events with project' do
let(:action) { described_class::ISSUE_COMMENT_EDITED }
def track_action(params)
@@ -259,8 +264,8 @@ RSpec.describe Gitlab::UsageDataCounters::IssueActivityUniqueCounter, :clean_git
end
end
- context 'for Issue comment removed actions' do
- it_behaves_like 'a daily tracked issuable event' do
+ context 'for Issue comment removed actions', :snowplow do
+ it_behaves_like 'daily tracked issuable snowplow and service ping events with project' do
let(:action) { described_class::ISSUE_COMMENT_REMOVED }
def track_action(params)
diff --git a/spec/lib/gitlab/usage_data_counters/merge_request_widget_extension_counter_spec.rb b/spec/lib/gitlab/usage_data_counters/merge_request_widget_extension_counter_spec.rb
new file mode 100644
index 00000000000..e073fac504a
--- /dev/null
+++ b/spec/lib/gitlab/usage_data_counters/merge_request_widget_extension_counter_spec.rb
@@ -0,0 +1,9 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::UsageDataCounters::MergeRequestWidgetExtensionCounter do
+ it_behaves_like 'a redis usage counter', 'Widget Extension', :test_summary_count_expand
+
+ it_behaves_like 'a redis usage counter with totals', :i_code_review_merge_request_widget, test_summary_count_expand: 5
+end
diff --git a/spec/lib/gitlab/usage_data_counters/work_item_activity_unique_counter_spec.rb b/spec/lib/gitlab/usage_data_counters/work_item_activity_unique_counter_spec.rb
index 0264236f087..0bcdbe82a7a 100644
--- a/spec/lib/gitlab/usage_data_counters/work_item_activity_unique_counter_spec.rb
+++ b/spec/lib/gitlab/usage_data_counters/work_item_activity_unique_counter_spec.rb
@@ -20,4 +20,12 @@ RSpec.describe Gitlab::UsageDataCounters::WorkItemActivityUniqueCounter, :clean_
it_behaves_like 'work item unique counter'
end
+
+ describe '.track_work_item_date_changed_action' do
+ subject(:track_event) { described_class.track_work_item_date_changed_action(author: user) }
+
+ let(:event_name) { described_class::WORK_ITEM_DATE_CHANGED }
+
+ it_behaves_like 'work item unique counter'
+ end
end
diff --git a/spec/lib/gitlab/usage_data_spec.rb b/spec/lib/gitlab/usage_data_spec.rb
index 6eb00053b17..692b6483149 100644
--- a/spec/lib/gitlab/usage_data_spec.rb
+++ b/spec/lib/gitlab/usage_data_spec.rb
@@ -1203,12 +1203,14 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
describe 'redis_hll_counters' do
subject { described_class.redis_hll_counters }
- let(:categories) { ::Gitlab::UsageDataCounters::HLLRedisCounter.categories }
+ let(:migrated_categories) do
+ ::Gitlab::UsageDataCounters::HLLRedisCounter.categories_collected_from_metrics_definitions
+ end
+ let(:categories) { ::Gitlab::UsageDataCounters::HLLRedisCounter.categories - migrated_categories }
let(:ignored_metrics) { ["i_package_composer_deploy_token_weekly"] }
it 'has all known_events' do
- stub_feature_flags(use_redis_hll_instrumentation_classes: false)
expect(subject).to have_key(:redis_hll_counters)
expect(subject[:redis_hll_counters].keys).to match_array(categories)
diff --git a/spec/lib/gitlab/utils/batch_loader_spec.rb b/spec/lib/gitlab/utils/batch_loader_spec.rb
new file mode 100644
index 00000000000..c1f6d6df07a
--- /dev/null
+++ b/spec/lib/gitlab/utils/batch_loader_spec.rb
@@ -0,0 +1,82 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+require 'batch-loader'
+
+RSpec.describe Gitlab::Utils::BatchLoader do
+ let(:stubbed_loader) do
+ double( # rubocop:disable RSpec/VerifiedDoubles
+ 'Loader',
+ load_lazy_method: [],
+ load_lazy_method_same_batch_key: [],
+ load_lazy_method_other_batch_key: []
+ )
+ end
+
+ let(:test_module) do
+ Module.new do
+ def self.lazy_method(id)
+ BatchLoader.for(id).batch(key: :my_batch_name) do |ids, loader|
+ stubbed_loader.load_lazy_method(ids)
+
+ ids.each { |id| loader.call(id, id) }
+ end
+ end
+
+ def self.lazy_method_same_batch_key(id)
+ BatchLoader.for(id).batch(key: :my_batch_name) do |ids, loader|
+ stubbed_loader.load_lazy_method_same_batch_key(ids)
+
+ ids.each { |id| loader.call(id, id) }
+ end
+ end
+
+ def self.lazy_method_other_batch_key(id)
+ BatchLoader.for(id).batch(key: :other_batch_name) do |ids, loader|
+ stubbed_loader.load_lazy_method_other_batch_key(ids)
+
+ ids.each { |id| loader.call(id, id) }
+ end
+ end
+ end
+ end
+
+ before do
+ BatchLoader::Executor.clear_current
+ allow(test_module).to receive(:stubbed_loader).and_return(stubbed_loader)
+ end
+
+ describe '.clear_key' do
+ it 'clears batched items which match the specified batch key' do
+ test_module.lazy_method(1)
+ test_module.lazy_method_same_batch_key(2)
+ test_module.lazy_method_other_batch_key(3)
+
+ described_class.clear_key(:my_batch_name)
+
+ test_module.lazy_method(4).to_i
+ test_module.lazy_method_same_batch_key(5).to_i
+ test_module.lazy_method_other_batch_key(6).to_i
+
+ expect(stubbed_loader).to have_received(:load_lazy_method).with([4])
+ expect(stubbed_loader).to have_received(:load_lazy_method_same_batch_key).with([5])
+ expect(stubbed_loader).to have_received(:load_lazy_method_other_batch_key).with([3, 6])
+ end
+
+ it 'clears loaded values which match the specified batch key' do
+ test_module.lazy_method(1).to_i
+ test_module.lazy_method_same_batch_key(2).to_i
+ test_module.lazy_method_other_batch_key(3).to_i
+
+ described_class.clear_key(:my_batch_name)
+
+ test_module.lazy_method(1).to_i
+ test_module.lazy_method_same_batch_key(2).to_i
+ test_module.lazy_method_other_batch_key(3).to_i
+
+ expect(stubbed_loader).to have_received(:load_lazy_method).with([1]).twice
+ expect(stubbed_loader).to have_received(:load_lazy_method_same_batch_key).with([2]).twice
+ expect(stubbed_loader).to have_received(:load_lazy_method_other_batch_key).with([3])
+ end
+ end
+end
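A hedged sketch of what clear_key looks like from a caller's perspective; lazy_find_user and the :users batch key are hypothetical, only BatchLoader.for(...).batch(key: ...) and Gitlab::Utils::BatchLoader.clear_key come from the spec above.

# Hypothetical lazy method batched under the :users key.
def lazy_find_user(id)
  BatchLoader.for(id).batch(key: :users) do |ids, loader|
    User.where(id: ids).each { |user| loader.call(user.id, user) }
  end
end

pending = lazy_find_user(1)                  # queued, not loaded yet
Gitlab::Utils::BatchLoader.clear_key(:users) # drops pending items and cached values for :users only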
diff --git a/spec/lib/gitlab/utils/link_header_parser_spec.rb b/spec/lib/gitlab/utils/link_header_parser_spec.rb
new file mode 100644
index 00000000000..e15ef930271
--- /dev/null
+++ b/spec/lib/gitlab/utils/link_header_parser_spec.rb
@@ -0,0 +1,75 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+
+RSpec.describe Gitlab::Utils::LinkHeaderParser do
+ let(:parser) { described_class.new(header) }
+
+ describe '#parse' do
+ subject { parser.parse }
+
+ context 'with a valid header' do
+ let(:header) { generate_header(next: 'http://sandbox.org/next') }
+ let(:expected) { { next: { uri: URI('http://sandbox.org/next') } } }
+
+ it { is_expected.to eq(expected) }
+
+ context 'with multiple links' do
+ let(:header) { generate_header(next: 'http://sandbox.org/next', previous: 'http://sandbox.org/previous') }
+ let(:expected) do
+ {
+ next: { uri: URI('http://sandbox.org/next') },
+ previous: { uri: URI('http://sandbox.org/previous') }
+ }
+ end
+
+ it { is_expected.to eq(expected) }
+ end
+
+ context 'with an incomplete uri' do
+ let(:header) { '<http://sandbox.org/next; rel="next"' }
+
+ it { is_expected.to eq({}) }
+ end
+
+ context 'with no rel' do
+ let(:header) { '<http://sandbox.org/next>; direction="next"' }
+
+ it { is_expected.to eq({}) }
+ end
+
+ context 'with multiple rel elements' do
+ # check https://datatracker.ietf.org/doc/html/rfc5988#section-5.3:
+ # occurrences after the first MUST be ignored by parsers
+ let(:header) { '<http://sandbox.org/next>; rel="next"; rel="dummy"' }
+
+ it { is_expected.to eq(expected) }
+ end
+
+ context 'when the url is too long' do
+ let(:header) { "<http://sandbox.org/#{'a' * 500}>; rel=\"next\"" }
+
+ it { is_expected.to eq({}) }
+ end
+ end
+
+ context 'with nil header' do
+ let(:header) { nil }
+
+ it { is_expected.to eq({}) }
+ end
+
+ context 'with empty header' do
+ let(:header) { '' }
+
+ it { is_expected.to eq({}) }
+ end
+
+ def generate_header(links)
+ stringified_links = links.map do |rel, url|
+ "<#{url}>; rel=\"#{rel}\""
+ end
+ stringified_links.join(', ')
+ end
+ end
+end
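A hedged usage sketch of the parser specced above; the header value is illustrative.

header = '<https://example.com/items?page=2>; rel="next", <https://example.com/items?page=1>; rel="previous"'

links = Gitlab::Utils::LinkHeaderParser.new(header).parse
links[:next][:uri]     # => URI('https://example.com/items?page=2')
links[:previous][:uri] # => URI('https://example.com/items?page=1')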
diff --git a/spec/lib/gitlab/utils/sanitize_node_link_spec.rb b/spec/lib/gitlab/utils/sanitize_node_link_spec.rb
index 514051b1cc0..3ab592dfc62 100644
--- a/spec/lib/gitlab/utils/sanitize_node_link_spec.rb
+++ b/spec/lib/gitlab/utils/sanitize_node_link_spec.rb
@@ -68,7 +68,7 @@ RSpec.describe Gitlab::Utils::SanitizeNodeLink do
describe "#safe_protocol?" do
let(:doc) { HTML::Pipeline.parse("<a href='#{scheme}alert(1);'>foo</a>") }
let(:node) { doc.children.first }
- let(:uri) { Addressable::URI.parse(node['href'])}
+ let(:uri) { Addressable::URI.parse(node['href']) }
it "returns false" do
expect(object.safe_protocol?(scheme)).to be_falsy
diff --git a/spec/lib/gitlab/utils/strong_memoize_spec.rb b/spec/lib/gitlab/utils/strong_memoize_spec.rb
index 5350e090e2b..cb03797b3d9 100644
--- a/spec/lib/gitlab/utils/strong_memoize_spec.rb
+++ b/spec/lib/gitlab/utils/strong_memoize_spec.rb
@@ -1,10 +1,27 @@
# frozen_string_literal: true
-require 'spec_helper'
+require 'fast_spec_helper'
+require 'rspec-benchmark'
+
+RSpec.configure do |config|
+ config.include RSpec::Benchmark::Matchers
+end
RSpec.describe Gitlab::Utils::StrongMemoize do
let(:klass) do
- struct = Struct.new(:value) do
+ strong_memoize_class = described_class
+
+ Struct.new(:value) do
+ include strong_memoize_class
+
+ def self.method_added_list
+ @method_added_list ||= []
+ end
+
+ def self.method_added(name)
+ method_added_list << name
+ end
+
def method_name
strong_memoize(:method_name) do
trace << value
@@ -12,21 +29,56 @@ RSpec.describe Gitlab::Utils::StrongMemoize do
end
end
+ def method_name_attr
+ trace << value
+ value
+ end
+ strong_memoize_attr :method_name_attr
+
+ strong_memoize_attr :different_method_name_attr, :different_member_name_attr
+ def different_method_name_attr
+ trace << value
+ value
+ end
+
+ strong_memoize_attr :enabled?
+ def enabled?
+ true
+ end
+
def trace
@trace ||= []
end
- end
- struct.include(described_class)
- struct
+ protected
+
+ def private_method
+ end
+ private :private_method
+ strong_memoize_attr :private_method
+
+ public
+
+ def protected_method
+ end
+ protected :protected_method
+ strong_memoize_attr :protected_method
+
+ private
+
+ def public_method
+ end
+ public :public_method
+ strong_memoize_attr :public_method
+ end
end
subject(:object) { klass.new(value) }
shared_examples 'caching the value' do
it 'only calls the block once' do
- value0 = object.method_name
- value1 = object.method_name
+ value0 = object.send(method_name)
+ value1 = object.send(method_name)
expect(value0).to eq(value)
expect(value1).to eq(value)
@@ -34,8 +86,8 @@ RSpec.describe Gitlab::Utils::StrongMemoize do
end
it 'returns and defines the instance variable for the exact value' do
- returned_value = object.method_name
- memoized_value = object.instance_variable_get(:@method_name)
+ returned_value = object.send(method_name)
+ memoized_value = object.instance_variable_get(:"@#{member_name}")
expect(returned_value).to eql(value)
expect(memoized_value).to eql(value)
@@ -46,12 +98,19 @@ RSpec.describe Gitlab::Utils::StrongMemoize do
[nil, false, true, 'value', 0, [0]].each do |value|
context "with value #{value}" do
let(:value) { value }
+ let(:method_name) { :method_name }
+ let(:member_name) { :method_name }
it_behaves_like 'caching the value'
- it 'raises exception for invalid key' do
+ it 'raises exception for invalid type as key' do
expect { object.strong_memoize(10) { 20 } }.to raise_error /Invalid type of '10'/
end
+
+ it 'raises exception for invalid characters in key' do
+ expect { object.strong_memoize(:enabled?) { 20 } }
+ .to raise_error /is not allowed as an instance variable name/
+ end
end
end
@@ -109,4 +168,64 @@ RSpec.describe Gitlab::Utils::StrongMemoize do
expect(object.instance_variable_defined?(:@method_name)).to be(false)
end
end
+
+ describe '.strong_memoize_attr' do
+ [nil, false, true, 'value', 0, [0]].each do |value|
+ let(:value) { value }
+
+ context "memoized after method definition with value #{value}" do
+ let(:method_name) { :method_name_attr }
+ let(:member_name) { :method_name_attr }
+
+ it_behaves_like 'caching the value'
+
+ it 'calls the existing .method_added' do
+ expect(klass.method_added_list).to include(:method_name_attr)
+ end
+ end
+
+ context "memoized before method definition with different member name and value #{value}" do
+ let(:method_name) { :different_method_name_attr }
+ let(:member_name) { :different_member_name_attr }
+
+ it_behaves_like 'caching the value'
+
+ it 'calls the existing .method_added' do
+ expect(klass.method_added_list).to include(:different_method_name_attr)
+ end
+ end
+
+ context 'with valid method name' do
+ let(:method_name) { :enabled? }
+
+ context 'with invalid member name' do
+ let(:member_name) { :enabled? }
+
+ it 'is invalid' do
+ expect { object.send(method_name) { value } }.to raise_error /is not allowed as an instance variable name/
+ end
+ end
+ end
+ end
+
+ describe 'method visibility' do
+ it 'sets private visibility' do
+ expect(klass.private_instance_methods).to include(:private_method)
+ expect(klass.protected_instance_methods).not_to include(:private_method)
+ expect(klass.public_instance_methods).not_to include(:private_method)
+ end
+
+ it 'sets protected visibility' do
+ expect(klass.private_instance_methods).not_to include(:protected_method)
+ expect(klass.protected_instance_methods).to include(:protected_method)
+ expect(klass.public_instance_methods).not_to include(:protected_method)
+ end
+
+ it 'sets public visibility' do
+ expect(klass.private_instance_methods).not_to include(:public_method)
+ expect(klass.protected_instance_methods).not_to include(:public_method)
+ expect(klass.public_instance_methods).to include(:public_method)
+ end
+ end
+ end
end
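A hedged usage sketch of the strong_memoize_attr API these new examples cover; the Settings class and its method bodies are hypothetical, while the call patterns mirror the spec above.

class Settings
  include Gitlab::Utils::StrongMemoize

  def expensive_lookup
    compute_value # executed once; the result is memoized in @expensive_lookup
  end
  strong_memoize_attr :expensive_lookup

  # The declaration may also precede the definition and name the memoized member explicitly.
  strong_memoize_attr :feature_flags, :feature_flags_memo
  def feature_flags
    load_flags
  end
end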
diff --git a/spec/lib/gitlab/utils/usage_data_spec.rb b/spec/lib/gitlab/utils/usage_data_spec.rb
index 25ba5a3e09e..13d046b0816 100644
--- a/spec/lib/gitlab/utils/usage_data_spec.rb
+++ b/spec/lib/gitlab/utils/usage_data_spec.rb
@@ -38,7 +38,7 @@ RSpec.describe Gitlab::Utils::UsageData do
end
describe '#add_metric' do
- let(:metric) { 'UuidMetric'}
+ let(:metric) { 'UuidMetric' }
it 'computes the metric value for given metric' do
expect(described_class.add_metric(metric)).to eq(Gitlab::CurrentSettings.uuid)
diff --git a/spec/lib/gitlab/utils_spec.rb b/spec/lib/gitlab/utils_spec.rb
index 0648d276a6b..ad1a65ffae8 100644
--- a/spec/lib/gitlab/utils_spec.rb
+++ b/spec/lib/gitlab/utils_spec.rb
@@ -115,7 +115,7 @@ RSpec.describe Gitlab::Utils do
end
it 'raises error for a non-string' do
- expect {check_allowed_absolute_path_and_path_traversal!(nil, allowed_paths)}.to raise_error(StandardError)
+ expect { check_allowed_absolute_path_and_path_traversal!(nil, allowed_paths) }.to raise_error(StandardError)
end
it 'raises an exception if an absolute path is not allowed' do
@@ -128,7 +128,7 @@ RSpec.describe Gitlab::Utils do
end
describe '.allowlisted?' do
- let(:allowed_paths) { ['/home/foo', '/foo/bar', '/etc/passwd']}
+ let(:allowed_paths) { ['/home/foo', '/foo/bar', '/etc/passwd'] }
it 'returns true if path is allowed' do
expect(allowlisted?('/foo/bar', allowed_paths)).to be(true)
diff --git a/spec/lib/gitlab/verify/uploads_spec.rb b/spec/lib/gitlab/verify/uploads_spec.rb
index 3e5154d5029..f9aa196ffde 100644
--- a/spec/lib/gitlab/verify/uploads_spec.rb
+++ b/spec/lib/gitlab/verify/uploads_spec.rb
@@ -90,7 +90,7 @@ RSpec.describe Gitlab::Verify::Uploads do
end
def perform_task
- described_class.new(batch_size: 100).run_batches { }
+ described_class.new(batch_size: 100).run_batches {}
end
end
end
diff --git a/spec/lib/gitlab/version_info_spec.rb b/spec/lib/gitlab/version_info_spec.rb
index 6ed094f11c8..078f952afad 100644
--- a/spec/lib/gitlab/version_info_spec.rb
+++ b/spec/lib/gitlab/version_info_spec.rb
@@ -79,11 +79,12 @@ RSpec.describe Gitlab::VersionInfo do
describe '.unknown' do
it { expect(@unknown).not_to be @v0_0_1 }
it { expect(@unknown).not_to be described_class.new }
- it { expect {@unknown > @v0_0_1}.to raise_error(ArgumentError) }
- it { expect {@unknown < @v0_0_1}.to raise_error(ArgumentError) }
+ it { expect { @unknown > @v0_0_1 }.to raise_error(ArgumentError) }
+ it { expect { @unknown < @v0_0_1 }.to raise_error(ArgumentError) }
end
describe '.parse' do
+ it { expect(described_class.parse(described_class.new(1, 0, 0))).to eq(@v1_0_0) }
it { expect(described_class.parse("1.0.0")).to eq(@v1_0_0) }
it { expect(described_class.parse("1.0.0.1")).to eq(@v1_0_0) }
it { expect(described_class.parse("1.0.0-ee")).to eq(@v1_0_0) }
@@ -133,6 +134,20 @@ RSpec.describe Gitlab::VersionInfo do
it { expect(@unknown.to_s).to eq("Unknown") }
end
+ describe '.to_json' do
+ let(:correct_version) do
+ "{\"major\":1,\"minor\":0,\"patch\":1}"
+ end
+
+ let(:unknown_version) do
+ "{\"major\":0,\"minor\":0,\"patch\":0}"
+ end
+
+ it { expect(@v1_0_1.to_json).to eq(correct_version) }
+ it { expect(@v1_0_1_rc2.to_json).to eq(correct_version) }
+ it { expect(@unknown.to_json).to eq(unknown_version) }
+ end
+
describe '.hash' do
it { expect(described_class.parse("1.0.0").hash).to eq(@v1_0_0.hash) }
it { expect(described_class.parse("1.0.0.1").hash).to eq(@v1_0_0.hash) }
diff --git a/spec/lib/google_api/cloud_platform/client_spec.rb b/spec/lib/google_api/cloud_platform/client_spec.rb
index ba49c00245e..aeca7b09a88 100644
--- a/spec/lib/google_api/cloud_platform/client_spec.rb
+++ b/spec/lib/google_api/cloud_platform/client_spec.rb
@@ -10,6 +10,25 @@ RSpec.describe GoogleApi::CloudPlatform::Client do
let(:gcp_project_id) { String('gcp_proj_id') }
let(:operation) { true }
let(:database_instance) { Google::Apis::SqladminV1beta4::DatabaseInstance.new(state: 'RUNNABLE') }
+ let(:instance_name) { 'mock-instance-name' }
+ let(:root_password) { 'mock-root-password' }
+ let(:database_version) { 'mock-database-version' }
+ let(:region) { 'mock-region' }
+ let(:tier) { 'mock-tier' }
+
+ let(:database_list) do
+ Google::Apis::SqladminV1beta4::ListDatabasesResponse.new(items: [
+ Google::Apis::SqladminV1beta4::Database.new(name: 'db_01', instance: database_instance),
+ Google::Apis::SqladminV1beta4::Database.new(name: 'db_02', instance: database_instance)
+ ])
+ end
+
+ let(:user_list) do
+ Google::Apis::SqladminV1beta4::ListUsersResponse.new(items: [
+ Google::Apis::SqladminV1beta4::User.new(name: 'user_01', instance: database_instance),
+ Google::Apis::SqladminV1beta4::User.new(name: 'user_02', instance: database_instance)
+ ])
+ end
describe '.session_key_for_redirect_uri' do
let(:state) { 'random_string' }
@@ -217,7 +236,11 @@ RSpec.describe GoogleApi::CloudPlatform::Client do
describe '#list_projects' do
subject { client.list_projects }
- let(:list_of_projects) { [{}, {}, {}] }
+ let(:gcp_project_01) { Google::Apis::CloudresourcemanagerV1::Project.new(project_id: '01') }
+ let(:gcp_project_02) { Google::Apis::CloudresourcemanagerV1::Project.new(project_id: '02') }
+ let(:gcp_project_03) { Google::Apis::CloudresourcemanagerV1::Project.new(project_id: '03') }
+ let(:list_of_projects) { [gcp_project_03, gcp_project_01, gcp_project_02] }
+
let(:next_page_token) { nil }
let(:operation) { double('projects': list_of_projects, 'next_page_token': next_page_token) }
@@ -225,7 +248,8 @@ RSpec.describe GoogleApi::CloudPlatform::Client do
expect_any_instance_of(Google::Apis::CloudresourcemanagerV1::CloudResourceManagerService)
.to receive(:list_projects)
.and_return(operation)
- is_expected.to eq(list_of_projects)
+
+ is_expected.to contain_exactly(gcp_project_01, gcp_project_02, gcp_project_03)
end
end
@@ -337,6 +361,42 @@ RSpec.describe GoogleApi::CloudPlatform::Client do
end
end
+ describe '#enable_cloud_sql_admin' do
+ subject { client.enable_cloud_sql_admin(gcp_project_id) }
+
+ it 'calls Google Api ServiceUsageService' do
+ expect_any_instance_of(Google::Apis::ServiceusageV1::ServiceUsageService)
+ .to receive(:enable_service)
+ .with("projects/#{gcp_project_id}/services/sqladmin.googleapis.com")
+ .and_return(operation)
+ is_expected.to eq(operation)
+ end
+ end
+
+ describe '#enable_compute' do
+ subject { client.enable_compute(gcp_project_id) }
+
+ it 'calls Google Api ServiceUsageService' do
+ expect_any_instance_of(Google::Apis::ServiceusageV1::ServiceUsageService)
+ .to receive(:enable_service)
+ .with("projects/#{gcp_project_id}/services/compute.googleapis.com")
+ .and_return(operation)
+ is_expected.to eq(operation)
+ end
+ end
+
+ describe '#enable_service_networking' do
+ subject { client.enable_service_networking(gcp_project_id) }
+
+ it 'calls Google Api ServiceUsageService' do
+ expect_any_instance_of(Google::Apis::ServiceusageV1::ServiceUsageService)
+ .to receive(:enable_service)
+ .with("projects/#{gcp_project_id}/services/servicenetworking.googleapis.com")
+ .and_return(operation)
+ is_expected.to eq(operation)
+ end
+ end
+
describe '#revoke_authorizations' do
subject { client.revoke_authorizations }
@@ -388,4 +448,57 @@ RSpec.describe GoogleApi::CloudPlatform::Client do
is_expected.to eq(database_instance)
end
end
+
+ describe '#list_cloudsql_databases' do
+ subject { client.list_cloudsql_databases(:gcp_project_id, :instance_name) }
+
+ it 'calls Google Api SQLAdminService#list_databases' do
+ expect_any_instance_of(Google::Apis::SqladminV1beta4::SQLAdminService)
+ .to receive(:list_databases)
+ .with(any_args)
+ .and_return(database_list)
+ is_expected.to eq(database_list)
+ end
+ end
+
+ describe '#list_cloudsql_users' do
+ subject { client.list_cloudsql_users(:gcp_project_id, :instance_name) }
+
+ it 'calls Google Api SQLAdminService#list_users' do
+ expect_any_instance_of(Google::Apis::SqladminV1beta4::SQLAdminService)
+ .to receive(:list_users)
+ .with(any_args)
+ .and_return(user_list)
+ is_expected.to eq(user_list)
+ end
+ end
+
+ describe '#create_cloudsql_instance' do
+ subject do
+ client.create_cloudsql_instance(
+ gcp_project_id,
+ instance_name,
+ root_password,
+ database_version,
+ region,
+ tier
+ )
+ end
+
+ it 'calls Google Api SQLAdminService#insert_instance' do
+ expect_any_instance_of(Google::Apis::SqladminV1beta4::SQLAdminService)
+ .to receive(:insert_instance)
+ .with(gcp_project_id,
+ having_attributes(
+ class: ::Google::Apis::SqladminV1beta4::DatabaseInstance,
+ name: instance_name,
+ root_password: root_password,
+ database_version: database_version,
+ region: region,
+ settings: instance_of(Google::Apis::SqladminV1beta4::Settings)
+ ))
+ .and_return(operation)
+ is_expected.to eq(operation)
+ end
+ end
end
diff --git a/spec/lib/json_web_token/rsa_token_spec.rb b/spec/lib/json_web_token/rsa_token_spec.rb
index 6d2026752d6..b77345d8b7a 100644
--- a/spec/lib/json_web_token/rsa_token_spec.rb
+++ b/spec/lib/json_web_token/rsa_token_spec.rb
@@ -30,8 +30,9 @@ RSpec.describe JSONWebToken::RSAToken do
subject { JWT.decode(rsa_encoded, rsa_key, true, { algorithm: 'RS256' }) }
- it { expect {subject}.not_to raise_error }
+ it { expect { subject }.not_to raise_error }
it { expect(subject.first).to include('key' => 'value') }
+
it do
expect(subject.second).to eq(
"typ" => "JWT",
@@ -45,7 +46,7 @@ RSpec.describe JSONWebToken::RSAToken do
subject { JWT.decode(rsa_encoded, new_key, true, { algorithm: 'RS256' }) }
- it { expect {subject}.to raise_error(JWT::DecodeError) }
+ it { expect { subject }.to raise_error(JWT::DecodeError) }
end
end
end
diff --git a/spec/lib/marginalia_spec.rb b/spec/lib/marginalia_spec.rb
index 693b7bd45c9..59add4e8347 100644
--- a/spec/lib/marginalia_spec.rb
+++ b/spec/lib/marginalia_spec.rb
@@ -11,10 +11,15 @@ RSpec.describe 'Marginalia spec' do
render body: nil
end
+ def first_ci_pipeline
+ Ci::Pipeline.first
+ render body: nil
+ end
+
private
[:auth_user, :current_user, :set_experimentation_subject_id_cookie, :signed_in?].each do |method|
- define_method(method) { }
+ define_method(method) {}
end
end
@@ -36,7 +41,7 @@ RSpec.describe 'Marginalia spec' do
describe 'For rails web requests' do
let(:correlation_id) { SecureRandom.uuid }
- let(:recorded) { ActiveRecord::QueryRecorder.new { make_request(correlation_id) } }
+ let(:recorded) { ActiveRecord::QueryRecorder.new { make_request(correlation_id, :first_user) } }
let(:component_map) do
{
@@ -54,10 +59,11 @@ RSpec.describe 'Marginalia spec' do
end
context 'when using CI database' do
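+      # Ci::Pipeline reads go to the ci database, so the connection no longer needs to be stubbed.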
+ let(:recorded) { ActiveRecord::QueryRecorder.new { make_request(correlation_id, :first_ci_pipeline) } }
let(:component_map) do
{
"application" => "test",
- "endpoint_id" => "MarginaliaTestController#first_user",
+ "endpoint_id" => "MarginaliaTestController#first_ci_pipeline",
"correlation_id" => correlation_id,
"db_config_name" => 'ci'
}
@@ -65,8 +71,6 @@ RSpec.describe 'Marginalia spec' do
before do
skip_if_multiple_databases_not_setup
-
- allow(User).to receive(:connection) { Ci::ApplicationRecord.connection }
end
it 'generates a query that includes the component and value' do
@@ -140,11 +144,11 @@ RSpec.describe 'Marginalia spec' do
end
end
- def make_request(correlation_id)
+ def make_request(correlation_id, action_name)
request_env = Rack::MockRequest.env_for('/')
::Labkit::Correlation::CorrelationId.use_id(correlation_id) do
- MarginaliaTestController.action(:first_user).call(request_env)
+ MarginaliaTestController.action(action_name).call(request_env)
end
end
end
diff --git a/spec/lib/mattermost/session_spec.rb b/spec/lib/mattermost/session_spec.rb
index 2158076e4b5..d208ef93224 100644
--- a/spec/lib/mattermost/session_spec.rb
+++ b/spec/lib/mattermost/session_spec.rb
@@ -25,7 +25,7 @@ RSpec.describe Mattermost::Session, type: :request do
describe '#with session' do
let(:location) { 'http://location.tld' }
- let(:cookie_header) {'MMOAUTH=taskik8az7rq8k6rkpuas7htia; Path=/;'}
+ let(:cookie_header) { 'MMOAUTH=taskik8az7rq8k6rkpuas7htia; Path=/;' }
let!(:stub) do
stub_full_request("#{mattermost_url}/oauth/gitlab/login")
.to_return(headers: { 'location' => location, 'Set-Cookie' => cookie_header }, status: 302)
diff --git a/spec/lib/microsoft_teams/notifier_spec.rb b/spec/lib/microsoft_teams/notifier_spec.rb
index 3b7892334dd..905b118d934 100644
--- a/spec/lib/microsoft_teams/notifier_spec.rb
+++ b/spec/lib/microsoft_teams/notifier_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe MicrosoftTeams::Notifier do
subject { described_class.new(webhook_url) }
- let(:webhook_url) { 'https://example.gitlab.com/'}
+ let(:webhook_url) { 'https://example.gitlab.com/' }
let(:header) { { 'Content-Type' => 'application/json' } }
let(:options) do
{
diff --git a/spec/lib/release_highlights/validator/entry_spec.rb b/spec/lib/release_highlights/validator/entry_spec.rb
index 5f7ccbf4310..b8b745ac8cd 100644
--- a/spec/lib/release_highlights/validator/entry_spec.rb
+++ b/spec/lib/release_highlights/validator/entry_spec.rb
@@ -25,18 +25,18 @@ RSpec.describe ReleaseHighlights::Validator::Entry do
it 'returns line numbers in errors' do
subject.valid?
- expect(entry.errors[:packages].first).to match('(line 6)')
+ expect(entry.errors[:available_in].first).to match('(line 6)')
end
end
context 'with a blank entry' do
- it 'validate presence of title, body and stage' do
+ it 'validates presence of name, description and stage' do
subject.valid?
- expect(subject.errors[:title]).not_to be_empty
- expect(subject.errors[:body]).not_to be_empty
+ expect(subject.errors[:name]).not_to be_empty
+ expect(subject.errors[:description]).not_to be_empty
expect(subject.errors[:stage]).not_to be_empty
- expect(subject.errors[:packages]).not_to be_empty
+ expect(subject.errors[:available_in]).not_to be_empty
end
it 'validates boolean value of "self-managed" and "gitlab-com"' do
@@ -52,11 +52,11 @@ RSpec.describe ReleaseHighlights::Validator::Entry do
it 'validates URI of "url" and "image_url"' do
stub_env('RSPEC_ALLOW_INVALID_URLS', 'false')
allow(entry).to receive(:value_for).with(:image_url).and_return('https://foobar.x/images/ci/gitlab-ci-cd-logo_2x.png')
- allow(entry).to receive(:value_for).with(:url).and_return('')
+ allow(entry).to receive(:value_for).with(:documentation_link).and_return('')
subject.valid?
- expect(subject.errors[:url]).to include(/must be a valid URL/)
+ expect(subject.errors[:documentation_link]).to include(/must be a valid URL/)
expect(subject.errors[:image_url]).to include(/is blocked: Host cannot be resolved or invalid/)
end
@@ -76,12 +76,12 @@ RSpec.describe ReleaseHighlights::Validator::Entry do
expect(subject.errors[:published_at]).to include(/must be valid Date/)
end
- it 'validates packages are included in list' do
- allow(entry).to receive(:value_for).with(:packages).and_return(['ALL'])
+ it 'validates available_in values are included in the list' do
+ allow(entry).to receive(:value_for).with(:available_in).and_return(['ALL'])
subject.valid?
- expect(subject.errors[:packages].first).to include("must be one of", "Free", "Premium", "Ultimate")
+ expect(subject.errors[:available_in].first).to include("must be one of", "Free", "Premium", "Ultimate")
end
end
end
diff --git a/spec/lib/release_highlights/validator_spec.rb b/spec/lib/release_highlights/validator_spec.rb
index f30754b4167..dd1b3aa4803 100644
--- a/spec/lib/release_highlights/validator_spec.rb
+++ b/spec/lib/release_highlights/validator_spec.rb
@@ -70,7 +70,7 @@ RSpec.describe ReleaseHighlights::Validator do
---------------------------------------------------------
Validation failed for spec/fixtures/whats_new/invalid.yml
---------------------------------------------------------
- * Packages must be one of ["Free", "Premium", "Ultimate"] (line 6)
+ * Available in must be one of ["Free", "Premium", "Ultimate"] (line 6)
MESSAGE
end
diff --git a/spec/lib/security/report_schema_version_matcher_spec.rb b/spec/lib/security/report_schema_version_matcher_spec.rb
new file mode 100644
index 00000000000..9c40f0bc6fa
--- /dev/null
+++ b/spec/lib/security/report_schema_version_matcher_spec.rb
@@ -0,0 +1,44 @@
+# frozen_string_literal: true
+require 'spec_helper'
+
+RSpec.describe Security::ReportSchemaVersionMatcher do
+ let(:vendored_versions) { %w[14.0.0 14.0.1 14.0.2 14.1.0] }
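+  # The matcher should return the closest vendored patch release within the report's major.minor series.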
+ let(:version_finder) do
+ described_class.new(
+ report_declared_version: report_version,
+ supported_versions: vendored_versions
+ )
+ end
+
+ describe '#call' do
+ subject { version_finder.call }
+
+ context 'when minor version matches' do
+ context 'and report schema patch version does not match any vendored schema versions' do
+ context 'and report version is 14.1.1' do
+ let(:report_version) { '14.1.1' }
+
+ it 'returns 14.1.0' do
+ expect(subject).to eq('14.1.0')
+ end
+ end
+
+ context 'and report version is 14.0.32' do
+ let(:report_version) { '14.0.32' }
+
+ it 'returns 14.0.2' do
+ expect(subject).to eq('14.0.2')
+ end
+ end
+ end
+ end
+
+ context 'when report minor version does not match' do
+ let(:report_version) { '14.2.1' }
+
+ it 'does not return a version' do
+ expect(subject).to be_nil
+ end
+ end
+ end
+end
diff --git a/spec/lib/sidebars/groups/menus/group_information_menu_spec.rb b/spec/lib/sidebars/groups/menus/group_information_menu_spec.rb
index 5f67ee11970..1b27db53b6f 100644
--- a/spec/lib/sidebars/groups/menus/group_information_menu_spec.rb
+++ b/spec/lib/sidebars/groups/menus/group_information_menu_spec.rb
@@ -18,13 +18,13 @@ RSpec.describe Sidebars::Groups::Menus::GroupInformationMenu do
subject { described_class.new(context).title }
context 'when group is a root group' do
- specify { is_expected.to eq 'Group information'}
+ specify { is_expected.to eq 'Group information' }
end
context 'when group is a child group' do
let(:group) { build(:group, parent: root_group) }
- specify { is_expected.to eq 'Subgroup information'}
+ specify { is_expected.to eq 'Subgroup information' }
end
end
@@ -32,13 +32,13 @@ RSpec.describe Sidebars::Groups::Menus::GroupInformationMenu do
subject { described_class.new(context).sprite_icon }
context 'when group is a root group' do
- specify { is_expected.to eq 'group'}
+ specify { is_expected.to eq 'group' }
end
context 'when group is a child group' do
let(:group) { build(:group, parent: root_group) }
- specify { is_expected.to eq 'subgroup'}
+ specify { is_expected.to eq 'subgroup' }
end
end
diff --git a/spec/lib/sidebars/menu_spec.rb b/spec/lib/sidebars/menu_spec.rb
index bdd9f22d5a0..53a889c2db8 100644
--- a/spec/lib/sidebars/menu_spec.rb
+++ b/spec/lib/sidebars/menu_spec.rb
@@ -219,7 +219,7 @@ RSpec.describe Sidebars::Menu do
end
describe '#link' do
- let(:foo_path) { '/foo_path'}
+ let(:foo_path) { '/foo_path' }
let(:foo_menu) do
::Sidebars::MenuItem.new(
diff --git a/spec/lib/tasks/gitlab/metrics_exporter_task_spec.rb b/spec/lib/tasks/gitlab/metrics_exporter_task_spec.rb
index dfb3c511470..4e17e91f019 100644
--- a/spec/lib/tasks/gitlab/metrics_exporter_task_spec.rb
+++ b/spec/lib/tasks/gitlab/metrics_exporter_task_spec.rb
@@ -26,7 +26,7 @@ RSpec.describe 'gitlab:metrics_exporter:install' do
let(:expected_clone_params) do
{
repo: 'https://gitlab.com/gitlab-org/gitlab-metrics-exporter.git',
- version: 'main',
+ version: an_instance_of(String),
target_dir: 'path/to/exporter'
}
end
diff --git a/spec/lib/unnested_in_filters/rewriter_spec.rb b/spec/lib/unnested_in_filters/rewriter_spec.rb
index e2ccbd92504..a808aec7728 100644
--- a/spec/lib/unnested_in_filters/rewriter_spec.rb
+++ b/spec/lib/unnested_in_filters/rewriter_spec.rb
@@ -88,6 +88,35 @@ RSpec.describe UnnestedInFilters::Rewriter do
expect(issued_query.gsub(/\s/, '')).to start_with(expected_query.gsub(/\s/, ''))
end
+ context 'when the relation has a subquery' do
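+      # The :state IN-list is itself a subquery, so the rewriter casts its result to an array before unnesting it.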
+ let(:relation) { User.where(state: User.select(:state), user_type: %i(support_bot alert_bot)).limit(1) }
+
+ let(:expected_query) do
+ <<~SQL
+ SELECT
+ "users".*
+ FROM
+ unnest(ARRAY(SELECT "users"."state" FROM "users")::character varying[]) AS "states"("state"),
+ unnest('{1,2}'::smallint[]) AS "user_types"("user_type"),
+ LATERAL (
+ SELECT
+ "users".*
+ FROM
+ "users"
+ WHERE
+ (users."state" = "states"."state") AND
+ (users."user_type" = "user_types"."user_type")
+ LIMIT 1
+ ) AS users
+ LIMIT 1
+ SQL
+ end
+
+ it 'changes the query' do
+ expect(issued_query.gsub(/\s/, '')).to start_with(expected_query.gsub(/\s/, ''))
+ end
+ end
+
context 'when there is an order' do
let(:relation) { User.where(state: %w(active blocked banned)).order(order).limit(2) }
let(:expected_query) do
diff --git a/spec/mailers/emails/admin_notification_spec.rb b/spec/mailers/emails/admin_notification_spec.rb
index 1b770d6d4a2..33b8558bfa3 100644
--- a/spec/mailers/emails/admin_notification_spec.rb
+++ b/spec/mailers/emails/admin_notification_spec.rb
@@ -11,68 +11,4 @@ RSpec.describe Emails::AdminNotification do
expect(Notify).to be_respond_to(email_method)
end
end
-
- describe 'user_auto_banned_email' do
- let_it_be(:admin) { create(:user) }
- let_it_be(:user) { create(:user) }
-
- let(:max_project_downloads) { 5 }
- let(:time_period) { 600 }
- let(:group) { nil }
-
- subject do
- Notify.user_auto_banned_email(
- admin.id, user.id,
- max_project_downloads: max_project_downloads,
- within_seconds: time_period,
- group: group
- )
- end
-
- it_behaves_like 'an email sent from GitLab'
- it_behaves_like 'it should not have Gmail Actions links'
- it_behaves_like 'a user cannot unsubscribe through footer link'
- it_behaves_like 'appearance header and footer enabled'
- it_behaves_like 'appearance header and footer not enabled'
-
- it 'is sent to the administrator' do
- is_expected.to deliver_to admin.email
- end
-
- it 'has the correct subject' do
- is_expected.to have_subject "We've detected unusual activity"
- end
-
- it 'includes the name of the user' do
- is_expected.to have_body_text user.name
- end
-
- it 'includes the scope of the ban' do
- is_expected.to have_body_text "banned from your GitLab instance"
- end
-
- it 'includes the reason' do
- is_expected.to have_body_text "due to them downloading more than 5 project repositories within 10 minutes"
- end
-
- it 'includes a link to unban the user' do
- is_expected.to have_body_text admin_users_url(filter: 'banned')
- end
-
- it 'includes a link to change the settings' do
- is_expected.to have_body_text network_admin_application_settings_url(anchor: 'js-ip-limits-settings')
- end
-
- it 'includes the email reason' do
- is_expected.to have_body_text %r{You're receiving this email because of your account on <a .*>localhost<\/a>}
- end
-
- context 'when scoped to a group' do
- let(:group) { create(:group) }
-
- it 'includes the scope of the ban' do
- is_expected.to have_body_text "banned from your group (#{group.name})"
- end
- end
- end
end
diff --git a/spec/mailers/emails/profile_spec.rb b/spec/mailers/emails/profile_spec.rb
index 09ed27eb90f..fce55256922 100644
--- a/spec/mailers/emails/profile_spec.rb
+++ b/spec/mailers/emails/profile_spec.rb
@@ -49,7 +49,7 @@ RSpec.describe Emails::Profile do
describe 'for users that signed up, the email' do
let(:example_site_path) { root_path }
- let(:new_user) { create(:user, email: new_user_address, password: "securePassword") }
+ let(:new_user) { create(:user, email: new_user_address) }
subject { Notify.new_user_email(new_user.id) }
@@ -59,6 +59,7 @@ RSpec.describe Emails::Profile do
it_behaves_like 'a user cannot unsubscribe through footer link'
it 'does not contain the new user\'s password' do
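+      # The factory now generates the password, so check the generated value as well as the literal word.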
+ is_expected.not_to have_body_text(new_user.password)
is_expected.not_to have_body_text /password/
end
end
diff --git a/spec/migrations/20210421163509_schedule_update_jira_tracker_data_deployment_type_based_on_url_spec.rb b/spec/migrations/20210421163509_schedule_update_jira_tracker_data_deployment_type_based_on_url_spec.rb
deleted file mode 100644
index 9a59c739ecd..00000000000
--- a/spec/migrations/20210421163509_schedule_update_jira_tracker_data_deployment_type_based_on_url_spec.rb
+++ /dev/null
@@ -1,48 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe ScheduleUpdateJiraTrackerDataDeploymentTypeBasedOnUrl, :migration do
- let(:services_table) { table(:services) }
- let(:service_jira_cloud) { services_table.create!(id: 1, type: 'JiraService') }
- let(:service_jira_server) { services_table.create!(id: 2, type: 'JiraService') }
-
- before do
- jira_tracker_data = Class.new(ApplicationRecord) do
- self.table_name = 'jira_tracker_data'
-
- def self.encryption_options
- {
- key: Settings.attr_encrypted_db_key_base_32,
- encode: true,
- mode: :per_attribute_iv,
- algorithm: 'aes-256-gcm'
- }
- end
-
- attr_encrypted :url, encryption_options
- attr_encrypted :api_url, encryption_options
- attr_encrypted :username, encryption_options
- attr_encrypted :password, encryption_options
- end
-
- stub_const('JiraTrackerData', jira_tracker_data)
- stub_const("#{described_class}::BATCH_SIZE", 1)
- end
-
- let!(:tracker_data_cloud) { JiraTrackerData.create!(id: 1, service_id: service_jira_cloud.id, url: "https://test-domain.atlassian.net", deployment_type: 0) }
- let!(:tracker_data_server) { JiraTrackerData.create!(id: 2, service_id: service_jira_server.id, url: "http://totally-not-jira-server.company.org", deployment_type: 0) }
-
- around do |example|
- freeze_time { Sidekiq::Testing.fake! { example.run } }
- end
-
- it 'schedules background migration' do
- migrate!
-
- expect(BackgroundMigrationWorker.jobs.size).to eq(2)
- expect(described_class::MIGRATION).to be_scheduled_migration(tracker_data_cloud.id, tracker_data_cloud.id)
- expect(described_class::MIGRATION).to be_scheduled_migration(tracker_data_server.id, tracker_data_server.id)
- end
-end
diff --git a/spec/migrations/2021061716138_cascade_delete_freeze_periods_spec.rb b/spec/migrations/2021061716138_cascade_delete_freeze_periods_spec.rb
index d35184e78a8..8dfeacc4774 100644
--- a/spec/migrations/2021061716138_cascade_delete_freeze_periods_spec.rb
+++ b/spec/migrations/2021061716138_cascade_delete_freeze_periods_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
require_migration!
-RSpec.describe CascadeDeleteFreezePeriods do
+RSpec.describe CascadeDeleteFreezePeriods, :suppress_gitlab_schemas_validate_connection do
let(:namespace) { table(:namespaces).create!(name: 'deploy_freeze', path: 'deploy_freeze') }
let(:project) { table(:projects).create!(id: 1, namespace_id: namespace.id) }
let(:freeze_periods) { table(:ci_freeze_periods) }
diff --git a/spec/migrations/20210818185845_backfill_projects_with_coverage_spec.rb b/spec/migrations/20210818185845_backfill_projects_with_coverage_spec.rb
index 29f554a003b..13a6aa5413e 100644
--- a/spec/migrations/20210818185845_backfill_projects_with_coverage_spec.rb
+++ b/spec/migrations/20210818185845_backfill_projects_with_coverage_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require_migration!
-RSpec.describe BackfillProjectsWithCoverage do
+RSpec.describe BackfillProjectsWithCoverage, :suppress_gitlab_schemas_validate_connection do
let(:projects) { table(:projects) }
let(:ci_pipelines) { table(:ci_pipelines) }
let(:ci_daily_build_group_report_results) { table(:ci_daily_build_group_report_results) }
diff --git a/spec/migrations/20211116111644_schedule_remove_occurrence_pipelines_and_duplicate_vulnerabilities_findings_spec.rb b/spec/migrations/20211116111644_schedule_remove_occurrence_pipelines_and_duplicate_vulnerabilities_findings_spec.rb
index c1d96f50dc8..cf6a033b4b8 100644
--- a/spec/migrations/20211116111644_schedule_remove_occurrence_pipelines_and_duplicate_vulnerabilities_findings_spec.rb
+++ b/spec/migrations/20211116111644_schedule_remove_occurrence_pipelines_and_duplicate_vulnerabilities_findings_spec.rb
@@ -3,7 +3,8 @@ require 'spec_helper'
require_migration!
-RSpec.describe ScheduleRemoveOccurrencePipelinesAndDuplicateVulnerabilitiesFindings, :migration do
+RSpec.describe ScheduleRemoveOccurrencePipelinesAndDuplicateVulnerabilitiesFindings,
+ :suppress_gitlab_schemas_validate_connection, :migration do
let_it_be(:background_migration_jobs) { table(:background_migration_jobs) }
let_it_be(:namespace) { table(:namespaces).create!(name: 'user', path: 'user') }
let_it_be(:users) { table(:users) }
diff --git a/spec/migrations/20220124130028_dedup_runner_projects_spec.rb b/spec/migrations/20220124130028_dedup_runner_projects_spec.rb
index 127f4798f33..3429ccc4df1 100644
--- a/spec/migrations/20220124130028_dedup_runner_projects_spec.rb
+++ b/spec/migrations/20220124130028_dedup_runner_projects_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require_migration!
-RSpec.describe DedupRunnerProjects, :migration, schema: 20220120085655 do
+RSpec.describe DedupRunnerProjects, :migration, :suppress_gitlab_schemas_validate_connection, schema: 20220120085655 do
let(:namespaces) { table(:namespaces) }
let(:projects) { table(:projects) }
let(:runners) { table(:ci_runners) }
diff --git a/spec/migrations/20220128155251_remove_dangling_running_builds_spec.rb b/spec/migrations/20220128155251_remove_dangling_running_builds_spec.rb
index a48464befdf..a23f9995875 100644
--- a/spec/migrations/20220128155251_remove_dangling_running_builds_spec.rb
+++ b/spec/migrations/20220128155251_remove_dangling_running_builds_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require_migration!('remove_dangling_running_builds')
-RSpec.describe RemoveDanglingRunningBuilds do
+RSpec.describe RemoveDanglingRunningBuilds, :suppress_gitlab_schemas_validate_connection do
let(:namespace) { table(:namespaces).create!(name: 'user', path: 'user') }
let(:project) { table(:projects).create!(namespace_id: namespace.id) }
let(:runner) { table(:ci_runners).create!(runner_type: 1) }
@@ -47,6 +47,6 @@ RSpec.describe RemoveDanglingRunningBuilds do
migrate!
expect(running_metadata.reload).to be_present
- expect { failed_metadata.reload } .to raise_error(ActiveRecord::RecordNotFound)
+ expect { failed_metadata.reload }.to raise_error(ActiveRecord::RecordNotFound)
end
end
diff --git a/spec/migrations/20220505174658_update_index_on_alerts_to_exclude_null_fingerprints_spec.rb b/spec/migrations/20220505174658_update_index_on_alerts_to_exclude_null_fingerprints_spec.rb
index 0c4d0e86789..ec58a54b085 100644
--- a/spec/migrations/20220505174658_update_index_on_alerts_to_exclude_null_fingerprints_spec.rb
+++ b/spec/migrations/20220505174658_update_index_on_alerts_to_exclude_null_fingerprints_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
require_migration!
RSpec.describe UpdateIndexOnAlertsToExcludeNullFingerprints do
- let(:alerts) { 'alert_management_alerts'}
+ let(:alerts) { 'alert_management_alerts' }
let(:old_index) { described_class::OLD_INDEX_NAME }
let(:new_index) { described_class::NEW_INDEX_NAME }
diff --git a/spec/migrations/20220506154054_create_sync_namespace_details_trigger_spec.rb b/spec/migrations/20220506154054_create_sync_namespace_details_trigger_spec.rb
new file mode 100644
index 00000000000..411b1eacb86
--- /dev/null
+++ b/spec/migrations/20220506154054_create_sync_namespace_details_trigger_spec.rb
@@ -0,0 +1,76 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+require_migration!
+
+RSpec.describe CreateSyncNamespaceDetailsTrigger do
+ let(:migration) { described_class.new }
+ let(:namespaces) { table(:namespaces) }
+ let(:namespace_details) { table(:namespace_details) }
+ let!(:timestamp) { Time.new(2020, 01, 01).utc }
+
+ let(:synced_attributes) do
+ {
+ description: 'description',
+ description_html: '<p>description</p>',
+ cached_markdown_version: 1966080,
+ created_at: timestamp,
+ updated_at: timestamp
+ }
+ end
+
+ let(:other_attributes) do
+ {
+ name: 'name',
+ path: 'path'
+ }
+ end
+
+ let(:attributes) { other_attributes.merge(synced_attributes) }
+
+ describe '#up' do
+ before do
+ migrate!
+ end
+
+ describe 'INSERT trigger' do
+      it 'creates a namespace_details record' do
+ expect do
+ namespaces.create!(attributes)
+ end.to change(namespace_details, :count).by(1)
+ end
+
+      it 'creates a namespace_details record with matching attributes' do
+ namespaces.create!(attributes)
+ synced_namespace_details = namespace_details.last
+
+ expect(synced_namespace_details).to have_attributes(synced_attributes)
+ end
+ end
+
+ describe 'UPDATE trigger' do
+ let!(:namespace) { namespaces.create!(attributes) }
+
+ it 'updates the attribute in the synced namespace_details record' do
+ namespace.update!(description: 'new_description')
+
+ synced_namespace_details = namespace_details.last
+ expect(synced_namespace_details.description).to eq('new_description')
+ end
+ end
+ end
+
+ describe '#down' do
+ before do
+ migration.up
+ migration.down
+ end
+
+ it 'drops the trigger' do
+ expect do
+ namespaces.create!(attributes)
+ end.not_to change(namespace_details, :count)
+ end
+ end
+end
diff --git a/spec/migrations/20220524184149_create_sync_project_namespace_details_trigger_spec.rb b/spec/migrations/20220524184149_create_sync_project_namespace_details_trigger_spec.rb
new file mode 100644
index 00000000000..f85a59357e1
--- /dev/null
+++ b/spec/migrations/20220524184149_create_sync_project_namespace_details_trigger_spec.rb
@@ -0,0 +1,73 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+require_migration!
+
+RSpec.describe CreateSyncProjectNamespaceDetailsTrigger do
+ let(:migration) { described_class.new }
+ let(:projects) { table(:projects) }
+ let(:namespaces) { table(:namespaces) }
+ let(:namespace_details) { table(:namespace_details) }
+ let!(:timestamp) { Time.new(2020, 01, 01).utc }
+ let!(:project_namespace) { namespaces.create!(name: 'name', path: 'path') }
+ let!(:namespace) { namespaces.create!(name: 'group', path: 'group_path') }
+
+ let(:synced_attributes) do
+ {
+ description: 'description',
+ description_html: '<p>description</p>',
+ cached_markdown_version: 1966080,
+ updated_at: timestamp
+ }
+ end
+
+ let(:other_attributes) do
+ {
+ name: 'project_name',
+ project_namespace_id: project_namespace.id,
+ namespace_id: namespace.id
+ }
+ end
+
+ let(:attributes) { other_attributes.merge(synced_attributes) }
+
+ describe '#up' do
+ before do
+ migrate!
+ end
+
+ describe 'INSERT trigger' do
+      it 'creates a namespace_details record with matching attributes' do
+ project = projects.create!(attributes)
+ synced_namespace_details = namespace_details.find_by(namespace_id: project.project_namespace_id)
+
+ expect(synced_namespace_details).to have_attributes(synced_attributes)
+ end
+ end
+
+ describe 'UPDATE trigger' do
+ let!(:project) { projects.create!(attributes) }
+
+ it 'updates the attribute in the synced namespace_details record' do
+ project.update!(description: 'new_description')
+
+ synced_namespace_details = namespace_details.find_by(namespace_id: project.project_namespace_id)
+ expect(synced_namespace_details.description).to eq('new_description')
+ end
+ end
+ end
+
+ describe '#down' do
+ before do
+ migration.up
+ migration.down
+ end
+
+ it 'drops the trigger' do
+ expect do
+ projects.create!(attributes)
+ end.not_to change(namespace_details, :count)
+ end
+ end
+end
diff --git a/spec/migrations/20220525221133_schedule_backfill_vulnerability_reads_cluster_agent_spec.rb b/spec/migrations/20220525221133_schedule_backfill_vulnerability_reads_cluster_agent_spec.rb
new file mode 100644
index 00000000000..3f1a2d8c4b9
--- /dev/null
+++ b/spec/migrations/20220525221133_schedule_backfill_vulnerability_reads_cluster_agent_spec.rb
@@ -0,0 +1,24 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe ScheduleBackfillVulnerabilityReadsClusterAgent do
+ let_it_be(:batched_migration) { described_class::MIGRATION_NAME }
+
+ it 'schedules background jobs for each batch of vulnerability reads' do
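+    # reversible_migration checks the `before` expectations prior to migrating (and after rollback) and the `after` expectations once migrated.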
+ reversible_migration do |migration|
+ migration.before -> {
+ expect(batched_migration).not_to have_scheduled_batched_migration
+ }
+
+ migration.after -> {
+ expect(batched_migration).to have_scheduled_batched_migration(
+ table_name: :vulnerability_reads,
+ column_name: :id,
+ interval: described_class::DELAY_INTERVAL
+ )
+ }
+ end
+ end
+end
diff --git a/spec/migrations/20220607082910_add_sync_tmp_index_for_potentially_misassociated_vulnerability_occurrences_spec.rb b/spec/migrations/20220607082910_add_sync_tmp_index_for_potentially_misassociated_vulnerability_occurrences_spec.rb
new file mode 100644
index 00000000000..68fac1c2221
--- /dev/null
+++ b/spec/migrations/20220607082910_add_sync_tmp_index_for_potentially_misassociated_vulnerability_occurrences_spec.rb
@@ -0,0 +1,22 @@
+# frozen_string_literal: true
+
+require "spec_helper"
+
+require_migration!
+
+RSpec.describe AddSyncTmpIndexForPotentiallyMisassociatedVulnerabilityOccurrences do
+ let(:table) { "vulnerability_occurrences" }
+ let(:index) { described_class::INDEX_NAME }
+
+ it "creates and drops the index" do
+ reversible_migration do |migration|
+ migration.before -> do
+ expect(ActiveRecord::Base.connection.indexes(table).map(&:name)).not_to include(index)
+ end
+
+ migration.after -> do
+ expect(ActiveRecord::Base.connection.indexes(table).map(&:name)).to include(index)
+ end
+ end
+ end
+end
diff --git a/spec/migrations/20220721031446_schedule_disable_legacy_open_source_license_for_one_member_no_repo_projects_spec.rb b/spec/migrations/20220721031446_schedule_disable_legacy_open_source_license_for_one_member_no_repo_projects_spec.rb
new file mode 100644
index 00000000000..b17a0215f4e
--- /dev/null
+++ b/spec/migrations/20220721031446_schedule_disable_legacy_open_source_license_for_one_member_no_repo_projects_spec.rb
@@ -0,0 +1,64 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe ScheduleDisableLegacyOpenSourceLicenseForOneMemberNoRepoProjects do
+ context 'when on gitlab.com' do
+ let(:migration) { described_class::MIGRATION }
+
+ before do
+ allow(Gitlab).to receive(:com?).and_return(true)
+ end
+
+ describe '#up' do
+ it 'schedules background jobs for each batch of projects' do
+ migrate!
+
+ expect(migration).to(
+ have_scheduled_batched_migration(
+ table_name: :projects,
+ column_name: :id,
+ interval: described_class::INTERVAL,
+ batch_size: described_class::BATCH_SIZE,
+ max_batch_size: described_class::MAX_BATCH_SIZE,
+ sub_batch_size: described_class::SUB_BATCH_SIZE
+ )
+ )
+ end
+ end
+
+ describe '#down' do
+ it 'deletes all batched migration records' do
+ migrate!
+ schema_migrate_down!
+
+ expect(migration).not_to have_scheduled_batched_migration
+ end
+ end
+ end
+
+ context 'when on self-managed instance' do
+ let(:migration) { described_class.new }
+
+ before do
+ allow(Gitlab).to receive(:com?).and_return(false)
+ end
+
+ describe '#up' do
+      it 'does not schedule a background job' do
+ expect(migration).not_to receive(:queue_batched_background_migration)
+
+ migration.up
+ end
+ end
+
+ describe '#down' do
+      it 'does not delete the background job' do
+ expect(migration).not_to receive(:delete_batched_background_migration)
+
+ migration.down
+ end
+ end
+ end
+end
diff --git a/spec/migrations/20220520040416_schedule_set_legacy_open_source_license_available_for_non_public_projects_spec.rb b/spec/migrations/20220722084543_schedule_disable_legacy_open_source_license_for_no_issues_no_repo_projects_spec.rb
index e3bc832a10b..cb0f941aea1 100644
--- a/spec/migrations/20220520040416_schedule_set_legacy_open_source_license_available_for_non_public_projects_spec.rb
+++ b/spec/migrations/20220722084543_schedule_disable_legacy_open_source_license_for_no_issues_no_repo_projects_spec.rb
@@ -3,8 +3,8 @@
require 'spec_helper'
require_migration!
-RSpec.describe ScheduleSetLegacyOpenSourceLicenseAvailableForNonPublicProjects do
- context 'on gitlab.com' do
+RSpec.describe ScheduleDisableLegacyOpenSourceLicenseForNoIssuesNoRepoProjects do
+ context 'when on gitlab.com' do
let(:migration) { described_class::MIGRATION }
before do
@@ -21,6 +21,7 @@ RSpec.describe ScheduleSetLegacyOpenSourceLicenseAvailableForNonPublicProjects d
column_name: :id,
interval: described_class::INTERVAL,
batch_size: described_class::BATCH_SIZE,
+ max_batch_size: described_class::MAX_BATCH_SIZE,
sub_batch_size: described_class::SUB_BATCH_SIZE
)
)
@@ -37,7 +38,7 @@ RSpec.describe ScheduleSetLegacyOpenSourceLicenseAvailableForNonPublicProjects d
end
end
- context 'on self-managed instance' do
+ context 'when on self-managed instance' do
let(:migration) { described_class.new }
before do
diff --git a/spec/migrations/20220722110026_reschedule_set_legacy_open_source_license_available_for_non_public_projects_spec.rb b/spec/migrations/20220722110026_reschedule_set_legacy_open_source_license_available_for_non_public_projects_spec.rb
new file mode 100644
index 00000000000..99a30c7f2a9
--- /dev/null
+++ b/spec/migrations/20220722110026_reschedule_set_legacy_open_source_license_available_for_non_public_projects_spec.rb
@@ -0,0 +1,64 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe RescheduleSetLegacyOpenSourceLicenseAvailableForNonPublicProjects do
+ context 'when on gitlab.com' do
+ let(:migration) { described_class::MIGRATION }
+
+ before do
+ allow(Gitlab).to receive(:com?).and_return(true)
+ end
+
+ describe '#up' do
+ it 'schedules background jobs for each batch of projects' do
+ migrate!
+
+ expect(migration).to(
+ have_scheduled_batched_migration(
+ table_name: :projects,
+ column_name: :id,
+ interval: described_class::INTERVAL,
+ batch_size: described_class::BATCH_SIZE,
+ max_batch_size: described_class::MAX_BATCH_SIZE,
+ sub_batch_size: described_class::SUB_BATCH_SIZE
+ )
+ )
+ end
+ end
+
+ describe '#down' do
+ it 'deletes all batched migration records' do
+ migrate!
+ schema_migrate_down!
+
+ expect(migration).not_to have_scheduled_batched_migration
+ end
+ end
+ end
+
+ context 'when on self-managed instance' do
+ let(:migration) { described_class.new }
+
+ before do
+ allow(Gitlab).to receive(:com?).and_return(false)
+ end
+
+ describe '#up' do
+      it 'does not schedule a background job' do
+ expect(migration).not_to receive(:queue_batched_background_migration)
+
+ migration.up
+ end
+ end
+
+ describe '#down' do
+      it 'does not delete the background job' do
+ expect(migration).not_to receive(:delete_batched_background_migration)
+
+ migration.down
+ end
+ end
+ end
+end
diff --git a/spec/migrations/20220725150127_update_jira_tracker_data_deployment_type_based_on_url_spec.rb b/spec/migrations/20220725150127_update_jira_tracker_data_deployment_type_based_on_url_spec.rb
new file mode 100644
index 00000000000..2651e46ba53
--- /dev/null
+++ b/spec/migrations/20220725150127_update_jira_tracker_data_deployment_type_based_on_url_spec.rb
@@ -0,0 +1,58 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe UpdateJiraTrackerDataDeploymentTypeBasedOnUrl, :migration do
+ let(:integrations_table) { table(:integrations) }
+ let(:service_jira_cloud) { integrations_table.create!(id: 1, type_new: 'JiraService') }
+ let(:service_jira_server) { integrations_table.create!(id: 2, type_new: 'JiraService') }
+
+ before do
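+    # Lightweight stand-in for JiraTrackerData with matching attr_encrypted settings so encrypted URLs can be written directly.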
+ jira_tracker_data = Class.new(ApplicationRecord) do
+ self.table_name = 'jira_tracker_data'
+
+ def self.encryption_options
+ {
+ key: Settings.attr_encrypted_db_key_base_32,
+ encode: true,
+ mode: :per_attribute_iv,
+ algorithm: 'aes-256-gcm'
+ }
+ end
+
+ attr_encrypted :url, encryption_options
+ attr_encrypted :api_url, encryption_options
+ attr_encrypted :username, encryption_options
+ attr_encrypted :password, encryption_options
+ end
+
+ stub_const('JiraTrackerData', jira_tracker_data)
+ stub_const("#{described_class}::BATCH_SIZE", 1)
+ stub_const("#{described_class}::SUB_BATCH_SIZE", 1)
+ end
+
+ # rubocop:disable Layout/LineLength
+ # rubocop:disable RSpec/ScatteredLet
+ let!(:tracker_data_cloud) { JiraTrackerData.create!(id: 1, integration_id: service_jira_cloud.id, url: "https://test-domain.atlassian.net", deployment_type: 0) }
+ let!(:tracker_data_server) { JiraTrackerData.create!(id: 2, integration_id: service_jira_server.id, url: "http://totally-not-jira-server.company.org", deployment_type: 0) }
+ # rubocop:enable Layout/LineLength
+ # rubocop:enable RSpec/ScatteredLet
+
+ around do |example|
+ freeze_time { Sidekiq::Testing.fake! { example.run } }
+ end
+
+ let(:migration) { described_class::MIGRATION } # rubocop:disable RSpec/ScatteredLet
+
+ it 'schedules background migration' do
+ migrate!
+
+ expect(migration).to have_scheduled_batched_migration(
+ table_name: :jira_tracker_data,
+ column_name: :id,
+ interval: described_class::DELAY_INTERVAL,
+ gitlab_schema: :gitlab_main
+ )
+ end
+end
diff --git a/spec/migrations/20220802114351_reschedule_backfill_container_registry_size_into_project_statistics_spec.rb b/spec/migrations/20220802114351_reschedule_backfill_container_registry_size_into_project_statistics_spec.rb
new file mode 100644
index 00000000000..cc1c1dac4c3
--- /dev/null
+++ b/spec/migrations/20220802114351_reschedule_backfill_container_registry_size_into_project_statistics_spec.rb
@@ -0,0 +1,41 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe RescheduleBackfillContainerRegistrySizeIntoProjectStatistics do
+ let_it_be(:batched_migration) { described_class::MIGRATION_CLASS }
+
+  it 'does not schedule background jobs when not on GitLab.com' do
+ allow(Gitlab).to receive(:com?).and_return(false)
+ allow(Gitlab).to receive(:dev_or_test_env?).and_return(false)
+
+ reversible_migration do |migration|
+ migration.before -> {
+ expect(batched_migration).not_to have_scheduled_batched_migration
+ }
+
+ migration.after -> {
+ expect(batched_migration).not_to have_scheduled_batched_migration
+ }
+ end
+ end
+
+  it 'schedules background jobs for each batch of container_repositories' do
+ allow(Gitlab).to receive(:com?).and_return(true)
+
+ reversible_migration do |migration|
+ migration.before -> {
+ expect(batched_migration).not_to have_scheduled_batched_migration
+ }
+
+ migration.after -> {
+ expect(batched_migration).to have_scheduled_batched_migration(
+ table_name: :container_repositories,
+ column_name: :project_id,
+ interval: described_class::DELAY_INTERVAL
+ )
+ }
+ end
+ end
+end
diff --git a/spec/migrations/20220802204737_remove_deactivated_user_highest_role_stats_spec.rb b/spec/migrations/20220802204737_remove_deactivated_user_highest_role_stats_spec.rb
new file mode 100644
index 00000000000..3ea286ca138
--- /dev/null
+++ b/spec/migrations/20220802204737_remove_deactivated_user_highest_role_stats_spec.rb
@@ -0,0 +1,39 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe RemoveDeactivatedUserHighestRoleStats do
+ let!(:users) { table(:users) }
+ let!(:user_highest_roles) { table(:user_highest_roles) }
+
+ let!(:user1) do
+ users.create!(username: 'user1', email: 'user1@example.com', projects_limit: 10, state: 'active')
+ end
+
+ let!(:user2) do
+ users.create!(username: 'user2', email: 'user2@example.com', projects_limit: 10, state: 'deactivated')
+ end
+
+ let!(:highest_role1) { user_highest_roles.create!(user_id: user1.id) }
+ let!(:highest_role2) { user_highest_roles.create!(user_id: user2.id) }
+
+ describe '#up' do
+ context 'when on gitlab.com' do
+ it 'does not change user highest role records' do
+ allow(Gitlab).to receive(:com?).and_return(true)
+ expect { migrate! }.not_to change(user_highest_roles, :count)
+ end
+ end
+
+ context 'when not on gitlab.com' do
+ it 'removes all user highest role records for deactivated users' do
+ allow(Gitlab).to receive(:com?).and_return(false)
+ migrate!
+ expect(user_highest_roles.pluck(:user_id)).to contain_exactly(
+ user1.id
+ )
+ end
+ end
+ end
+end
diff --git a/spec/migrations/associate_existing_dast_builds_with_variables_spec.rb b/spec/migrations/associate_existing_dast_builds_with_variables_spec.rb
index 74429e498df..dd86989912f 100644
--- a/spec/migrations/associate_existing_dast_builds_with_variables_spec.rb
+++ b/spec/migrations/associate_existing_dast_builds_with_variables_spec.rb
@@ -4,73 +4,7 @@ require 'spec_helper'
require_migration!
RSpec.describe AssociateExistingDastBuildsWithVariables do
- subject(:migration) { described_class.new }
-
- let_it_be(:namespaces_table) { table(:namespaces) }
- let_it_be(:projects_table) { table(:projects) }
- let_it_be(:ci_pipelines_table) { table(:ci_pipelines) }
- let_it_be(:ci_builds_table) { table(:ci_builds) }
- let_it_be(:dast_sites_table) { table(:dast_sites) }
- let_it_be(:dast_site_profiles_table) { table(:dast_site_profiles) }
- let_it_be(:dast_scanner_profiles_table) { table(:dast_scanner_profiles) }
- let_it_be(:dast_site_profiles_builds_table) { table(:dast_site_profiles_builds) }
- let_it_be(:dast_profiles_table) { table(:dast_profiles) }
- let_it_be(:dast_profiles_pipelines_table) { table(:dast_profiles_pipelines) }
-
- let!(:group) { namespaces_table.create!(type: 'Group', name: 'group', path: 'group') }
- let!(:project) { projects_table.create!(name: 'project', path: 'project', namespace_id: group.id) }
-
- let!(:pipeline_0) { ci_pipelines_table.create!(project_id: project.id, source: 13) }
- let!(:pipeline_1) { ci_pipelines_table.create!(project_id: project.id, source: 13) }
- let!(:build_0) { ci_builds_table.create!(project_id: project.id, commit_id: pipeline_0.id, name: :dast, stage: :dast) }
- let!(:build_1) { ci_builds_table.create!(project_id: project.id, commit_id: pipeline_0.id, name: :dast, stage: :dast) }
- let!(:build_2) { ci_builds_table.create!(project_id: project.id, commit_id: pipeline_1.id, name: :dast, stage: :dast) }
- let!(:build_3) { ci_builds_table.create!(project_id: project.id, commit_id: pipeline_1.id, name: :dast) }
- let!(:build_4) { ci_builds_table.create!(project_id: project.id, commit_id: pipeline_1.id, stage: :dast) }
-
- let!(:dast_site) { dast_sites_table.create!(project_id: project.id, url: generate(:url)) }
- let!(:dast_site_profile) { dast_site_profiles_table.create!(project_id: project.id, dast_site_id: dast_site.id, name: SecureRandom.hex) }
- let!(:dast_scanner_profile) { dast_scanner_profiles_table.create!(project_id: project.id, name: SecureRandom.hex) }
-
- let!(:dast_profile) do
- dast_profiles_table.create!(
- project_id: project.id,
- dast_site_profile_id: dast_site_profile.id,
- dast_scanner_profile_id: dast_scanner_profile.id,
- name: SecureRandom.hex,
- description: SecureRandom.hex
- )
- end
-
- let!(:dast_profiles_pipeline_0) { dast_profiles_pipelines_table.create!(dast_profile_id: dast_profile.id, ci_pipeline_id: pipeline_0.id) }
- let!(:dast_profiles_pipeline_1) { dast_profiles_pipelines_table.create!(dast_profile_id: dast_profile.id, ci_pipeline_id: pipeline_1.id) }
-
- context 'when there are ci_pipelines with associated dast_profiles' do
- describe 'migration up' do
- it 'adds association of dast_site_profiles to ci_builds', :aggregate_failures do
- expect(dast_site_profiles_builds_table.all).to be_empty
-
- migration.up
-
- expected_results = [
- [dast_site_profile.id, build_0.id],
- [dast_site_profile.id, build_1.id],
- [dast_site_profile.id, build_2.id]
- ]
-
- expect(dast_site_profiles_builds_table.all.map { |assoc| [assoc.dast_site_profile_id, assoc.ci_build_id] }).to contain_exactly(*expected_results)
- end
- end
- end
-
- describe 'migration down' do
- it 'deletes all records in the dast_site_profiles_builds table', :aggregate_failures do
- expect(dast_site_profiles_builds_table.all).to be_empty
-
- migration.up
- migration.down
-
- expect(dast_site_profiles_builds_table.all).to be_empty
- end
+ it 'is a no-op' do
+ migrate!
end
end
diff --git a/spec/migrations/backfill_project_import_level_spec.rb b/spec/migrations/backfill_project_import_level_spec.rb
new file mode 100644
index 00000000000..c24ddac0730
--- /dev/null
+++ b/spec/migrations/backfill_project_import_level_spec.rb
@@ -0,0 +1,29 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe BackfillProjectImportLevel do
+ let_it_be(:batched_migration) { described_class::MIGRATION }
+
+ describe '#up' do
+ it 'schedules background jobs for each batch of namespaces' do
+ migrate!
+
+ expect(batched_migration).to have_scheduled_batched_migration(
+ table_name: :namespaces,
+ column_name: :id,
+ interval: described_class::INTERVAL
+ )
+ end
+ end
+
+ describe '#down' do
+ it 'deletes all batched migration records' do
+ migrate!
+ schema_migrate_down!
+
+ expect(batched_migration).not_to have_scheduled_batched_migration
+ end
+ end
+end
diff --git a/spec/migrations/change_public_projects_cost_factor_spec.rb b/spec/migrations/change_public_projects_cost_factor_spec.rb
index 78030736093..039edda750b 100644
--- a/spec/migrations/change_public_projects_cost_factor_spec.rb
+++ b/spec/migrations/change_public_projects_cost_factor_spec.rb
@@ -3,16 +3,7 @@
require 'spec_helper'
require_migration!
-RSpec.describe ChangePublicProjectsCostFactor, :migration do
- # This is a workaround to force the migration to run against the
- # `gitlab_ci` schema. Otherwise it only runs against `gitlab_main`.
- around do |example| # rubocop: disable Style/MultilineIfModifier
- with_reestablished_active_record_base do
- reconfigure_db_connection(name: :ci)
- example.run
- end
- end if Gitlab::Database.has_config?(:ci)
-
+RSpec.describe ChangePublicProjectsCostFactor, migration: :gitlab_ci do
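+# The migration: :gitlab_ci metadata runs this spec against the gitlab_ci schema, replacing the manual reconnection workaround.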
let(:runners) { table(:ci_runners) }
let!(:shared_1) { runners.create!(runner_type: 1, public_projects_minutes_cost_factor: 0) }
diff --git a/spec/migrations/clean_up_pending_builds_table_spec.rb b/spec/migrations/clean_up_pending_builds_table_spec.rb
index 9c8d4413337..17e62e1b486 100644
--- a/spec/migrations/clean_up_pending_builds_table_spec.rb
+++ b/spec/migrations/clean_up_pending_builds_table_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require_migration!
-RSpec.describe CleanUpPendingBuildsTable do
+RSpec.describe CleanUpPendingBuildsTable, :suppress_gitlab_schemas_validate_connection do
let(:namespaces) { table(:namespaces) }
let(:projects) { table(:projects) }
let(:queue) { table(:ci_pending_builds) }
diff --git a/spec/migrations/cleanup_mr_attention_request_todos_spec.rb b/spec/migrations/cleanup_mr_attention_request_todos_spec.rb
new file mode 100644
index 00000000000..9f593ca8292
--- /dev/null
+++ b/spec/migrations/cleanup_mr_attention_request_todos_spec.rb
@@ -0,0 +1,46 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe CleanupMrAttentionRequestTodos, :migration do
+ let(:projects) { table(:projects) }
+ let(:namespaces) { table(:namespaces) }
+ let(:users) { table(:users) }
+ let(:todos) { table(:todos) }
+
+ let(:author) { users.create!(projects_limit: 1) }
+ let(:namespace) { namespaces.create!(name: 'test', path: 'test') }
+ let(:project) do
+ projects.create!(
+ namespace_id: namespace.id,
+ project_namespace_id: namespace.id,
+ name: 'test-project'
+ )
+ end
+
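+  # Action value of the removed "attention requested" todo type that the migration cleans up.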
+ let(:attention_requested) { 10 }
+ let(:todo_attrs) do
+ {
+ project_id: project.id,
+ author_id: author.id,
+ user_id: author.id,
+ target_type: 'TestType',
+ state: 'pending'
+ }
+ end
+
+ let!(:todo1) { todos.create!(todo_attrs.merge(action: Todo::ASSIGNED)) }
+ let!(:todo2) { todos.create!(todo_attrs.merge(action: Todo::MENTIONED)) }
+ let!(:todo3) { todos.create!(todo_attrs.merge(action: Todo::REVIEW_REQUESTED)) }
+ let!(:todo4) { todos.create!(todo_attrs.merge(action: attention_requested)) }
+ let!(:todo5) { todos.create!(todo_attrs.merge(action: attention_requested)) }
+
+ describe '#up' do
+    it 'cleans up attention request todos' do
+ expect { migrate! }.to change(todos, :count).by(-2)
+
+ expect(todos.all).to include(todo1, todo2, todo3)
+ end
+ end
+end
diff --git a/spec/migrations/delete_security_findings_without_uuid_spec.rb b/spec/migrations/delete_security_findings_without_uuid_spec.rb
index b32ea89f8aa..bfd89f1aa82 100644
--- a/spec/migrations/delete_security_findings_without_uuid_spec.rb
+++ b/spec/migrations/delete_security_findings_without_uuid_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require_migration!
-RSpec.describe DeleteSecurityFindingsWithoutUuid do
+RSpec.describe DeleteSecurityFindingsWithoutUuid, :suppress_gitlab_schemas_validate_connection do
let(:users) { table(:users) }
let(:namespaces) { table(:namespaces) }
let(:projects) { table(:projects) }
diff --git a/spec/migrations/disable_job_token_scope_when_unused_spec.rb b/spec/migrations/disable_job_token_scope_when_unused_spec.rb
index d969c98aa0f..3ce4ef5c102 100644
--- a/spec/migrations/disable_job_token_scope_when_unused_spec.rb
+++ b/spec/migrations/disable_job_token_scope_when_unused_spec.rb
@@ -4,41 +4,7 @@ require 'spec_helper'
require_migration!
RSpec.describe DisableJobTokenScopeWhenUnused do
- let(:ci_cd_settings) { table(:project_ci_cd_settings) }
- let(:links) { table(:ci_job_token_project_scope_links) }
- let(:namespaces) { table(:namespaces) }
- let(:projects) { table(:projects) }
-
- let(:namespace) { namespaces.create!(name: 'test', path: 'path', type: 'Group') }
-
- let(:project_with_used_scope) { projects.create!(namespace_id: namespace.id) }
- let!(:used_scope_settings) { ci_cd_settings.create!(project_id: project_with_used_scope.id, job_token_scope_enabled: true) }
- let(:target_project) { projects.create!(namespace_id: namespace.id) }
- let!(:link) { links.create!(source_project_id: project_with_used_scope.id, target_project_id: target_project.id) }
-
- let(:project_with_unused_scope) { projects.create!(namespace_id: namespace.id) }
- let!(:unused_scope_settings) { ci_cd_settings.create!(project_id: project_with_unused_scope.id, job_token_scope_enabled: true) }
-
- let(:project_with_disabled_scope) { projects.create!(namespace_id: namespace.id) }
- let!(:disabled_scope_settings) { ci_cd_settings.create!(project_id: project_with_disabled_scope.id, job_token_scope_enabled: false) }
-
- describe '#up' do
- it 'sets job_token_scope_enabled to false for projects not having job token scope configured' do
- migrate!
-
- expect(unused_scope_settings.reload.job_token_scope_enabled).to be_falsey
- end
-
- it 'keeps the scope enabled for projects that are using it' do
- migrate!
-
- expect(used_scope_settings.reload.job_token_scope_enabled).to be_truthy
- end
-
- it 'keeps the scope disabled for projects having it disabled' do
- migrate!
-
- expect(disabled_scope_settings.reload.job_token_scope_enabled).to be_falsey
- end
+ it 'is a no-op' do
+ migrate!
end
end
diff --git a/spec/migrations/migrate_protected_attribute_to_pending_builds_spec.rb b/spec/migrations/migrate_protected_attribute_to_pending_builds_spec.rb
index 2108adcc973..01805a9eb79 100644
--- a/spec/migrations/migrate_protected_attribute_to_pending_builds_spec.rb
+++ b/spec/migrations/migrate_protected_attribute_to_pending_builds_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require_migration!
-RSpec.describe MigrateProtectedAttributeToPendingBuilds do
+RSpec.describe MigrateProtectedAttributeToPendingBuilds, :suppress_gitlab_schemas_validate_connection do
let(:namespaces) { table(:namespaces) }
let(:projects) { table(:projects) }
let(:queue) { table(:ci_pending_builds) }
diff --git a/spec/migrations/re_schedule_latest_pipeline_id_population_with_all_security_related_artifact_types_spec.rb b/spec/migrations/re_schedule_latest_pipeline_id_population_with_all_security_related_artifact_types_spec.rb
index 8a9b993b869..45a2772adda 100644
--- a/spec/migrations/re_schedule_latest_pipeline_id_population_with_all_security_related_artifact_types_spec.rb
+++ b/spec/migrations/re_schedule_latest_pipeline_id_population_with_all_security_related_artifact_types_spec.rb
@@ -3,7 +3,8 @@
require 'spec_helper'
require_migration!
-RSpec.describe ReScheduleLatestPipelineIdPopulationWithAllSecurityRelatedArtifactTypes do
+RSpec.describe ReScheduleLatestPipelineIdPopulationWithAllSecurityRelatedArtifactTypes,
+ :suppress_gitlab_schemas_validate_connection do
let(:namespaces) { table(:namespaces) }
let(:pipelines) { table(:ci_pipelines) }
let(:projects) { table(:projects) }
diff --git a/spec/migrations/schedule_backfilling_the_namespace_id_for_vulnerability_reads_spec.rb b/spec/migrations/schedule_backfilling_the_namespace_id_for_vulnerability_reads_spec.rb
new file mode 100644
index 00000000000..e03096de98d
--- /dev/null
+++ b/spec/migrations/schedule_backfilling_the_namespace_id_for_vulnerability_reads_spec.rb
@@ -0,0 +1,32 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+require_migration!
+
+RSpec.describe ScheduleBackfillingTheNamespaceIdForVulnerabilityReads do
+ let_it_be(:migration) { described_class::MIGRATION_NAME }
+
+ describe '#up' do
+ it 'schedules background jobs for each batch of vulnerabilities' do
+ migrate!
+
+ expect(migration).to have_scheduled_batched_migration(
+ table_name: :vulnerability_reads,
+ column_name: :vulnerability_id,
+ interval: 2.minutes,
+ batch_size: 10_000,
+ sub_batch_size: 200
+ )
+ end
+ end
+
+ describe '#down' do
+ it 'deletes all batched migration records' do
+ migrate!
+ schema_migrate_down!
+
+ expect(migration).not_to have_scheduled_batched_migration
+ end
+ end
+end
diff --git a/spec/migrations/schedule_copy_ci_builds_columns_to_security_scans2_spec.rb b/spec/migrations/schedule_copy_ci_builds_columns_to_security_scans2_spec.rb
index 012c7d065fc..67d54ea92a0 100644
--- a/spec/migrations/schedule_copy_ci_builds_columns_to_security_scans2_spec.rb
+++ b/spec/migrations/schedule_copy_ci_builds_columns_to_security_scans2_spec.rb
@@ -4,49 +4,7 @@ require 'spec_helper'
require_migration!
RSpec.describe ScheduleCopyCiBuildsColumnsToSecurityScans2 do
- let_it_be(:namespaces) { table(:namespaces) }
- let_it_be(:projects) { table(:projects) }
- let_it_be(:ci_pipelines) { table(:ci_pipelines) }
- let_it_be(:ci_builds) { table(:ci_builds) }
- let_it_be(:security_scans) { table(:security_scans) }
- let_it_be(:background_migration_jobs) { table(:background_migration_jobs) }
-
- let!(:namespace) { namespaces.create!(name: 'namespace', path: 'namespace') }
- let!(:project) { projects.create!(namespace_id: namespace.id) }
- let!(:pipeline) { ci_pipelines.create!(status: "success")}
-
- let!(:build1) { ci_builds.create!(commit_id: pipeline.id, type: 'Ci::Build', project_id: project.id) }
- let!(:build2) { ci_builds.create!(commit_id: pipeline.id, type: 'Ci::Build', project_id: project.id) }
- let!(:build3) { ci_builds.create!(commit_id: pipeline.id, type: 'Ci::Build', project_id: project.id) }
-
- let!(:scan1) { security_scans.create!(build_id: build1.id, scan_type: 1) }
- let!(:scan2) { security_scans.create!(build_id: build2.id, scan_type: 1) }
- let!(:scan3) { security_scans.create!(build_id: build3.id, scan_type: 1) }
-
- let!(:job_class_name) { described_class::MIGRATION }
- let!(:tracked_pending_job) { background_migration_jobs.create!(class_name: job_class_name, status: 0, arguments: [1]) }
- let!(:tracked_successful_job) { background_migration_jobs.create!(class_name: job_class_name, status: 1, arguments: [2]) }
- let(:jobs) { Gitlab::Database::BackgroundMigrationJob.where(id: [tracked_pending_job.id, tracked_successful_job.id] ).for_migration_class(job_class_name) }
-
- before do
- stub_const("#{described_class}::BATCH_SIZE", 2)
- allow_next_instance_of(Gitlab::BackgroundMigration::CopyCiBuildsColumnsToSecurityScans) do |instance|
- allow(instance).to receive(:mark_job_as_succeeded)
- end
- end
-
- around do |example|
- freeze_time { Sidekiq::Testing.fake! { example.run } }
- end
-
- it 'schedules background migrations', :aggregate_failures do
- expect(jobs).not_to be_empty
-
+ it 'is a no-op' do
migrate!
-
- expect(jobs).to be_empty
- expect(BackgroundMigrationWorker.jobs.size).to eq(2)
- expect(described_class::MIGRATION).to be_scheduled_delayed_migration(2.minutes, scan1.id, scan2.id)
- expect(described_class::MIGRATION).to be_scheduled_delayed_migration(4.minutes, scan3.id, scan3.id)
end
end
diff --git a/spec/migrations/schedule_migrate_shared_vulnerability_scanners_spec.rb b/spec/migrations/schedule_migrate_shared_vulnerability_scanners_spec.rb
new file mode 100644
index 00000000000..f00d6568b67
--- /dev/null
+++ b/spec/migrations/schedule_migrate_shared_vulnerability_scanners_spec.rb
@@ -0,0 +1,59 @@
+# frozen_string_literal: true
+
+require "spec_helper"
+
+require_migration!
+
+RSpec.describe ScheduleMigrateSharedVulnerabilityScanners, :migration do
+ describe "#up" do
+ before do
+ migrate!
+ end
+
+ it "schedules" do
+ expect(described_class::MIGRATION).to have_scheduled_batched_migration(
+ table_name: described_class::TABLE_NAME,
+ column_name: described_class::BATCH_COLUMN,
+ interval: described_class::DELAY_INTERVAL,
+ batch_size: described_class::BATCH_SIZE,
+ max_batch_size: described_class::BATCH_SIZE,
+ sub_batch_size: described_class::SUB_BATCH_SIZE,
+ gitlab_schema: :gitlab_main
+ )
+ end
+
+ describe "ID range" do
+ let(:expected_range) do
+ { min_value: described_class::BATCH_MIN_VALUE,
+ max_value: described_class::BATCH_MAX_VALUE }
+ end
+
+ subject do
+ Gitlab::Database::BackgroundMigration::BatchedMigration
+ .for_configuration(:gitlab_main,
+ described_class::MIGRATION,
+ described_class::TABLE_NAME,
+ described_class::BATCH_COLUMN,
+ [])
+ end
+
+ it "is set" do
+ # The `have_scheduled_batched_migration` matcher accepts the
+ # `batch_min_value` and `batch_max_value` keywords. However the respective
+ # column names are `min_value` and `max_value`. Hence the matcher cannot
+ # be used in this case, as it asserts the wrong attributes.
+ expect(subject).to all(have_attributes(expected_range))
+ end
+ end
+ end
+
+ describe '#down' do
+ before do
+ schema_migrate_down!
+ end
+
+ it "deletes" do
+ expect(described_class::MIGRATION).not_to have_scheduled_batched_migration
+ end
+ end
+end
diff --git a/spec/migrations/schedule_populate_status_column_of_security_scans_spec.rb b/spec/migrations/schedule_populate_status_column_of_security_scans_spec.rb
deleted file mode 100644
index 601935db8db..00000000000
--- a/spec/migrations/schedule_populate_status_column_of_security_scans_spec.rb
+++ /dev/null
@@ -1,48 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe SchedulePopulateStatusColumnOfSecurityScans do
- before do
- allow(Gitlab).to receive(:ee?).and_return(ee?)
- stub_const("#{described_class.name}::BATCH_SIZE", 1)
- end
-
- context 'when the Gitlab instance is CE' do
- let(:ee?) { false }
-
- it 'does not run the migration' do
- expect { migrate! }.not_to change { BackgroundMigrationWorker.jobs.size }
- end
- end
-
- context 'when the Gitlab instance is EE' do
- let(:ee?) { true }
- let(:namespaces) { table(:namespaces) }
- let(:projects) { table(:projects) }
- let(:pipelines) { table(:ci_pipelines) }
- let(:builds) { table(:ci_builds) }
- let(:security_scans) { table(:security_scans) }
-
- let(:namespace) { namespaces.create!(name: "foo", path: "bar") }
- let(:project) { projects.create!(namespace_id: namespace.id) }
- let(:pipeline) { pipelines.create!(project_id: project.id, ref: 'master', sha: 'adf43c3a', status: 'success') }
- let(:ci_build) { builds.create!(commit_id: pipeline.id, retried: false, type: 'Ci::Build') }
-
- let!(:security_scan_1) { security_scans.create!(build_id: ci_build.id, scan_type: 1) }
- let!(:security_scan_2) { security_scans.create!(build_id: ci_build.id, scan_type: 2) }
-
- around do |example|
- freeze_time { Sidekiq::Testing.fake! { example.run } }
- end
-
- it 'schedules the background jobs', :aggregate_failures do
- migrate!
-
- expect(BackgroundMigrationWorker.jobs.size).to be(2)
- expect(described_class::MIGRATION).to be_scheduled_delayed_migration(2.minutes, security_scan_1.id, security_scan_1.id)
- expect(described_class::MIGRATION).to be_scheduled_delayed_migration(4.minutes, security_scan_2.id, security_scan_2.id)
- end
- end
-end
diff --git a/spec/migrations/start_backfill_ci_queuing_tables_spec.rb b/spec/migrations/start_backfill_ci_queuing_tables_spec.rb
index a1e4179efb6..08fd244089f 100644
--- a/spec/migrations/start_backfill_ci_queuing_tables_spec.rb
+++ b/spec/migrations/start_backfill_ci_queuing_tables_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require_migration!
-RSpec.describe StartBackfillCiQueuingTables do
+RSpec.describe StartBackfillCiQueuingTables, :suppress_gitlab_schemas_validate_connection do
let(:namespaces) { table(:namespaces) }
let(:projects) { table(:projects) }
let(:builds) { table(:ci_builds) }
diff --git a/spec/models/active_session_spec.rb b/spec/models/active_session_spec.rb
index 751d31ad95a..5d316f7cff2 100644
--- a/spec/models/active_session_spec.rb
+++ b/spec/models/active_session_spec.rb
@@ -175,7 +175,7 @@ RSpec.describe ActiveSession, :clean_gitlab_redis_sessions do
expect(Gitlab::Redis::Sessions).to receive(:with).and_yield(redis)
sessions = %w[session-a session-b]
- mget_responses = sessions.map { |session| [Marshal.dump(session)]}
+ mget_responses = sessions.map { |session| [Marshal.dump(session)] }
expect(redis).to receive(:mget).twice.times.and_return(*mget_responses)
expect(ActiveSession.sessions_from_ids([1, 2])).to eql(sessions)
diff --git a/spec/models/application_setting_spec.rb b/spec/models/application_setting_spec.rb
index 0b3521cdd0c..16e1d8fbc4d 100644
--- a/spec/models/application_setting_spec.rb
+++ b/spec/models/application_setting_spec.rb
@@ -116,6 +116,7 @@ RSpec.describe ApplicationSetting do
it { is_expected.to validate_presence_of(:max_yaml_depth) }
it { is_expected.to validate_numericality_of(:max_yaml_depth).only_integer.is_greater_than(0) }
it { is_expected.to validate_presence_of(:max_pages_size) }
+
it 'ensures max_pages_size is an integer greater than 0 (or equal to 0 to indicate unlimited/maximum)' do
is_expected.to validate_numericality_of(:max_pages_size).only_integer.is_greater_than_or_equal_to(0)
.is_less_than(::Gitlab::Pages::MAX_SIZE / 1.megabyte)
@@ -1438,4 +1439,10 @@ RSpec.describe ApplicationSetting do
end
end
end
+
+ context 'personal access token prefix' do
+ it 'sets the correct default value' do
+ expect(setting.personal_access_token_prefix).to eql('glpat-')
+ end
+ end
end
diff --git a/spec/models/aws/role_spec.rb b/spec/models/aws/role_spec.rb
index ee93c9d6fad..f23f18ab44f 100644
--- a/spec/models/aws/role_spec.rb
+++ b/spec/models/aws/role_spec.rb
@@ -31,7 +31,7 @@ RSpec.describe Aws::Role do
end
context 'ARN is nil' do
- let(:role_arn) { }
+ let(:role_arn) {}
it { is_expected.to be_truthy }
end
diff --git a/spec/models/board_spec.rb b/spec/models/board_spec.rb
index 775cccd2aec..6017298e85b 100644
--- a/spec/models/board_spec.rb
+++ b/spec/models/board_spec.rb
@@ -3,8 +3,8 @@
require 'spec_helper'
RSpec.describe Board do
- let(:project) { create(:project) }
- let(:other_project) { create(:project) }
+ let_it_be(:project) { create(:project) }
+ let_it_be(:other_project) { create(:project) }
describe 'relationships' do
it { is_expected.to belong_to(:project) }
diff --git a/spec/models/chat_name_spec.rb b/spec/models/chat_name_spec.rb
index 1d2ad8b4dce..02c38479d1a 100644
--- a/spec/models/chat_name_spec.rb
+++ b/spec/models/chat_name_spec.rb
@@ -15,8 +15,8 @@ RSpec.describe ChatName do
it { is_expected.to validate_presence_of(:team_id) }
it { is_expected.to validate_presence_of(:chat_id) }
- it { is_expected.to validate_uniqueness_of(:user_id).scoped_to(:service_id) }
- it { is_expected.to validate_uniqueness_of(:chat_id).scoped_to(:service_id, :team_id) }
+ it { is_expected.to validate_uniqueness_of(:user_id).scoped_to(:integration_id) }
+ it { is_expected.to validate_uniqueness_of(:chat_id).scoped_to(:integration_id, :team_id) }
it 'is removed when the project is deleted' do
expect { subject.reload.integration.project.delete }.to change { ChatName.count }.by(-1)
diff --git a/spec/models/ci/bridge_spec.rb b/spec/models/ci/bridge_spec.rb
index cb29cce554f..40c2d62c465 100644
--- a/spec/models/ci/bridge_spec.rb
+++ b/spec/models/ci/bridge_spec.rb
@@ -25,6 +25,8 @@ RSpec.describe Ci::Bridge do
expect(bridge).to have_many(:sourced_pipelines)
end
+ it_behaves_like 'has ID tokens', :ci_bridge
+
it 'has one downstream pipeline' do
expect(bridge).to have_one(:sourced_pipeline)
expect(bridge).to have_one(:downstream_pipeline)
@@ -401,6 +403,18 @@ RSpec.describe Ci::Bridge do
end
end
+ describe '#downstream_project_path' do
+ context 'when trigger is defined' do
+ context 'when using variable expansion' do
+ let(:options) { { trigger: { project: 'my/$BRIDGE/project' } } }
+
+ it 'correctly expands variables' do
+ expect(bridge.downstream_project_path).to eq('my/cross/project')
+ end
+ end
+ end
+ end
+
describe '#target_ref' do
context 'when trigger is defined' do
it 'returns a ref name' do
diff --git a/spec/models/ci/build_dependencies_spec.rb b/spec/models/ci/build_dependencies_spec.rb
index 91048cae064..737348765d9 100644
--- a/spec/models/ci/build_dependencies_spec.rb
+++ b/spec/models/ci/build_dependencies_spec.rb
@@ -76,8 +76,8 @@ RSpec.describe Ci::BuildDependencies do
end
describe 'jobs from specified dependencies' do
- let(:dependencies) { }
- let(:needs) { }
+ let(:dependencies) {}
+ let(:needs) {}
let!(:job) do
scheduling_type = needs.present? ? :dag : :stage
diff --git a/spec/models/ci/build_metadata_spec.rb b/spec/models/ci/build_metadata_spec.rb
index 5e30f9160cd..e904463a5ca 100644
--- a/spec/models/ci/build_metadata_spec.rb
+++ b/spec/models/ci/build_metadata_spec.rb
@@ -105,6 +105,13 @@ RSpec.describe Ci::BuildMetadata do
}
}
}
+ metadata.id_tokens = {
+ TEST_JWT_TOKEN: {
+ id_token: {
+ aud: 'https://gitlab.test'
+ }
+ }
+ }
expect(metadata).to be_valid
end
@@ -113,10 +120,14 @@ RSpec.describe Ci::BuildMetadata do
context 'when data is invalid' do
it 'returns errors' do
metadata.secrets = { DATABASE_PASSWORD: { vault: {} } }
+ metadata.id_tokens = { TEST_JWT_TOKEN: { id_token: { aud: nil } } }
aggregate_failures do
expect(metadata).to be_invalid
- expect(metadata.errors.full_messages).to eq(["Secrets must be a valid json schema"])
+ expect(metadata.errors.full_messages).to contain_exactly(
+ 'Secrets must be a valid json schema',
+ 'Id tokens must be a valid json schema'
+ )
end
end
end
diff --git a/spec/models/ci/build_runner_session_spec.rb b/spec/models/ci/build_runner_session_spec.rb
index 601c6ad26f9..ed5ed456d7b 100644
--- a/spec/models/ci/build_runner_session_spec.rb
+++ b/spec/models/ci/build_runner_session_spec.rb
@@ -65,7 +65,7 @@ RSpec.describe Ci::BuildRunnerSession, model: true do
end
describe '#service_specification' do
- let(:service) { 'foo'}
+ let(:service) { 'foo' }
let(:port) { 80 }
let(:path) { 'path' }
let(:subprotocols) { nil }
diff --git a/spec/models/ci/build_spec.rb b/spec/models/ci/build_spec.rb
index e0166ba64a4..b865688d370 100644
--- a/spec/models/ci/build_spec.rb
+++ b/spec/models/ci/build_spec.rb
@@ -3,6 +3,9 @@
require 'spec_helper'
RSpec.describe Ci::Build do
+ include Ci::TemplateHelpers
+ include AfterNextHelpers
+
let_it_be(:user) { create(:user) }
let_it_be(:group, reload: true) { create(:group) }
let_it_be(:project, reload: true) { create(:project, :repository, group: group) }
@@ -59,11 +62,36 @@ RSpec.describe Ci::Build do
describe 'callbacks' do
context 'when running after_create callback' do
- it 'triggers asynchronous build hooks worker' do
- expect(BuildHooksWorker).to receive(:perform_async)
+ it 'executes hooks' do
+ expect_next(described_class).to receive(:execute_hooks)
create(:ci_build)
end
+
+ context 'when the execute_build_hooks_inline flag is disabled' do
+ before do
+ stub_feature_flags(execute_build_hooks_inline: false)
+ end
+
+ it 'uses the old job hooks worker' do
+ expect(::BuildHooksWorker).to receive(:perform_async).with(Ci::Build)
+
+ create(:ci_build)
+ end
+ end
+
+ context 'when the execute_build_hooks_inline flag is enabled for a project' do
+ before do
+ stub_feature_flags(execute_build_hooks_inline: project)
+ end
+
+ it 'executes hooks inline' do
+ expect(::BuildHooksWorker).not_to receive(:perform_async)
+ expect_next(described_class).to receive(:execute_hooks)
+
+ create(:ci_build, project: project)
+ end
+ end
end
end
@@ -81,6 +109,8 @@ RSpec.describe Ci::Build do
end
end
+ it_behaves_like 'has ID tokens', :ci_build
+
describe '.manual_actions' do
let!(:manual_but_created) { create(:ci_build, :manual, status: :created, pipeline: pipeline) }
let!(:manual_but_succeeded) { create(:ci_build, :manual, status: :success, pipeline: pipeline) }
@@ -1289,7 +1319,7 @@ RSpec.describe Ci::Build do
let(:subject) { build.hide_secrets(data) }
context 'hide runners token' do
- let(:data) { "new #{project.runners_token} data"}
+ let(:data) { "new #{project.runners_token} data" }
it { is_expected.to match(/^new x+ data$/) }
@@ -1303,7 +1333,7 @@ RSpec.describe Ci::Build do
end
context 'hide build token' do
- let(:data) { "new #{build.token} data"}
+ let(:data) { "new #{build.token} data" }
it { is_expected.to match(/^new x+ data$/) }
@@ -1335,6 +1365,43 @@ RSpec.describe Ci::Build do
end
end
+ describe 'state transition metrics' do
+ using RSpec::Parameterized::TableSyntax
+
+ subject { build.send(event) }
+
+ where(:ff_enabled, :state, :report_count, :trait) do
+ true | :success! | 1 | :sast
+ true | :cancel! | 1 | :sast
+ true | :drop! | 2 | :multiple_report_artifacts
+ true | :success! | 0 | :allowed_to_fail
+ true | :skip! | 0 | :pending
+ false | :success! | 0 | :sast
+ end
+
+ with_them do
+ let(:build) { create(:ci_build, trait, project: project, pipeline: pipeline) }
+ let(:event) { state }
+
+ context "when transitioning to #{params[:state]}" do
+ before do
+ allow(Gitlab).to receive(:com?).and_return(true)
+ stub_feature_flags(report_artifact_build_completed_metrics_on_build_completion: ff_enabled)
+ end
+
+ it 'increments build_completed_report_type metric' do
+ expect(
+ ::Gitlab::Ci::Artifacts::Metrics
+ ).to receive(
+ :build_completed_report_type_counter
+ ).exactly(report_count).times.and_call_original
+
+ subject
+ end
+ end
+ end
+ end
+
describe 'state transition as a deployable' do
subject { build.send(event) }
@@ -1518,8 +1585,8 @@ RSpec.describe Ci::Build do
end
end
- describe '#environment_deployment_tier' do
- subject { build.environment_deployment_tier }
+ describe '#environment_tier_from_options' do
+ subject { build.environment_tier_from_options }
let(:build) { described_class.new(options: options) }
let(:options) { { environment: { deployment_tier: 'production' } } }
@@ -1533,6 +1600,30 @@ RSpec.describe Ci::Build do
end
end
+ describe '#environment_tier' do
+ subject { build.environment_tier }
+
+ let(:options) { { environment: { deployment_tier: 'production' } } }
+ let!(:environment) { create(:environment, name: 'production', tier: 'development', project: project) }
+ let(:build) { described_class.new(options: options, environment: 'production', project: project) }
+
+ it { is_expected.to eq('production') }
+
+ context 'when options does not include deployment_tier' do
+ let(:options) { { environment: { name: 'production' } } }
+
+ it 'uses tier from environment' do
+ is_expected.to eq('development')
+ end
+
+ context 'when persisted environment is absent' do
+ let(:environment) { nil }
+
+ it { is_expected.to be_nil }
+ end
+ end
+ end
+
describe 'environment' do
describe '#has_environment?' do
subject { build.has_environment? }
@@ -1601,20 +1692,18 @@ RSpec.describe Ci::Build do
end
it 'returns an expanded environment name with a list of variables' do
- expect(build).to receive(:simple_variables).once.and_call_original
-
is_expected.to eq('review/host')
end
context 'when build metadata has already persisted the expanded environment name' do
before do
- build.metadata.expanded_environment_name = 'review/host'
+ build.metadata.expanded_environment_name = 'review/foo'
end
it 'returns a persisted expanded environment name without a list of variables' do
expect(build).not_to receive(:simple_variables)
- is_expected.to eq('review/host')
+ is_expected.to eq('review/foo')
end
end
end
@@ -1642,14 +1731,6 @@ RSpec.describe Ci::Build do
end
it { is_expected.to eq('review/master') }
-
- context 'when the FF ci_expand_environment_name_and_url is disabled' do
- before do
- stub_feature_flags(ci_expand_environment_name_and_url: false)
- end
-
- it { is_expected.to eq('review/${CI_COMMIT_REF_NAME}') }
- end
end
end
@@ -1693,7 +1774,7 @@ RSpec.describe Ci::Build do
end
context 'with a dynamic value' do
- let(:namespace) { 'deploy-$CI_COMMIT_REF_NAME'}
+ let(:namespace) { 'deploy-$CI_COMMIT_REF_NAME' }
it { is_expected.to eq 'deploy-master' }
end
@@ -1806,6 +1887,21 @@ RSpec.describe Ci::Build do
end
context 'build is erasable' do
+ context 'logging erase' do
+ let!(:build) { create(:ci_build, :test_reports, :trace_artifact, :success, :artifacts) }
+
+ it 'logs erased artifacts' do
+ expect(Gitlab::Ci::Artifacts::Logger)
+ .to receive(:log_deleted)
+ .with(
+ match_array(build.job_artifacts.to_a),
+ 'Ci::Build#erase'
+ )
+
+ build.erase
+ end
+ end
+
context 'when project is undergoing stats refresh' do
let!(:build) { create(:ci_build, :test_reports, :trace_artifact, :success, :artifacts) }
@@ -1908,7 +2004,14 @@ RSpec.describe Ci::Build do
end
end
- it "erases erasable artifacts" do
+ it "erases erasable artifacts and logs them" do
+ expect(Gitlab::Ci::Artifacts::Logger)
+ .to receive(:log_deleted)
+ .with(
+ match_array(build.job_artifacts.erasable.to_a),
+ 'Ci::Build#erase_erasable_artifacts!'
+ )
+
subject
expect(build.job_artifacts.erasable).to be_empty
@@ -2627,7 +2730,7 @@ RSpec.describe Ci::Build do
build.update_columns(token_encrypted: nil)
end
- it { is_expected.to be_nil}
+ it { is_expected.to be_nil }
end
end
@@ -2812,6 +2915,7 @@ RSpec.describe Ci::Build do
public: true,
masked: false },
{ key: 'CI_API_V4_URL', value: 'http://localhost/api/v4', public: true, masked: false },
+ { key: 'CI_TEMPLATE_REGISTRY_HOST', value: template_registry_host, public: true, masked: false },
{ key: 'CI_PIPELINE_IID', value: pipeline.iid.to_s, public: true, masked: false },
{ key: 'CI_PIPELINE_SOURCE', value: pipeline.source, public: true, masked: false },
{ key: 'CI_PIPELINE_CREATED_AT', value: pipeline.created_at.iso8601, public: true, masked: false },
@@ -2929,7 +3033,7 @@ RSpec.describe Ci::Build do
let(:expected_variables) do
predefined_variables.map { |variable| variable.fetch(:key) } +
%w[YAML_VARIABLE CI_ENVIRONMENT_NAME CI_ENVIRONMENT_SLUG
- CI_ENVIRONMENT_TIER CI_ENVIRONMENT_ACTION CI_ENVIRONMENT_URL]
+ CI_ENVIRONMENT_ACTION CI_ENVIRONMENT_TIER CI_ENVIRONMENT_URL]
end
before do
@@ -3096,6 +3200,16 @@ RSpec.describe Ci::Build do
end
end
+ context 'when environment_tier is updated in options' do
+ before do
+ build.update!(options: { environment: { name: 'production', deployment_tier: 'development' } })
+ end
+
+ it 'uses tier from options' do
+ is_expected.to include({ key: 'CI_ENVIRONMENT_TIER', value: 'development', public: true, masked: false })
+ end
+ end
+
context 'when project has an environment specific variable' do
let(:environment_specific_variable) do
{ key: 'MY_STAGING_ONLY_VARIABLE', value: 'environment_specific_variable', public: false, masked: false }
@@ -3508,8 +3622,8 @@ RSpec.describe Ci::Build do
context 'when gitlab-deploy-token does not exist for project' do
it 'does not include deploy token variables' do
- expect(subject.find { |v| v[:key] == 'CI_DEPLOY_USER'}).to be_nil
- expect(subject.find { |v| v[:key] == 'CI_DEPLOY_PASSWORD'}).to be_nil
+ expect(subject.find { |v| v[:key] == 'CI_DEPLOY_USER' }).to be_nil
+ expect(subject.find { |v| v[:key] == 'CI_DEPLOY_PASSWORD' }).to be_nil
end
context 'when gitlab-deploy-token exists for group' do
@@ -3527,8 +3641,8 @@ RSpec.describe Ci::Build do
end
it 'does not include deploy token variables' do
- expect(subject.find { |v| v[:key] == 'CI_DEPLOY_USER'}).to be_nil
- expect(subject.find { |v| v[:key] == 'CI_DEPLOY_PASSWORD'}).to be_nil
+ expect(subject.find { |v| v[:key] == 'CI_DEPLOY_USER' }).to be_nil
+ expect(subject.find { |v| v[:key] == 'CI_DEPLOY_PASSWORD' }).to be_nil
end
end
end
@@ -3559,10 +3673,10 @@ RSpec.describe Ci::Build do
context 'when harbor_integration does not exist' do
it 'does not include harbor variables' do
- expect(subject.find { |v| v[:key] == 'HARBOR_URL'}).to be_nil
- expect(subject.find { |v| v[:key] == 'HARBOR_PROJECT_NAME'}).to be_nil
- expect(subject.find { |v| v[:key] == 'HARBOR_USERNAME'}).to be_nil
- expect(subject.find { |v| v[:key] == 'HARBOR_PASSWORD'}).to be_nil
+ expect(subject.find { |v| v[:key] == 'HARBOR_URL' }).to be_nil
+ expect(subject.find { |v| v[:key] == 'HARBOR_PROJECT_NAME' }).to be_nil
+ expect(subject.find { |v| v[:key] == 'HARBOR_USERNAME' }).to be_nil
+ expect(subject.find { |v| v[:key] == 'HARBOR_PASSWORD' }).to be_nil
end
end
end
@@ -3807,8 +3921,20 @@ RSpec.describe Ci::Build do
build.enqueue
end
- it 'queues BuildHooksWorker' do
- expect(BuildHooksWorker).to receive(:perform_async).with(build)
+ context 'when the execute_build_hooks_inline flag is disabled' do
+ before do
+ stub_feature_flags(execute_build_hooks_inline: false)
+ end
+
+ it 'queues BuildHooksWorker' do
+ expect(BuildHooksWorker).to receive(:perform_async).with(build)
+
+ build.enqueue
+ end
+ end
+
+ it 'executes hooks' do
+ expect(build).to receive(:execute_hooks)
build.enqueue
end
@@ -4526,7 +4652,7 @@ RSpec.describe Ci::Build do
end
describe '#each_report' do
- let(:report_types) { Ci::JobArtifact::COVERAGE_REPORT_FILE_TYPES }
+ let(:report_types) { Ci::JobArtifact.file_types_for_report(:coverage) }
let!(:codequality) { create(:ci_job_artifact, :codequality, job: build) }
let!(:coverage) { create(:ci_job_artifact, :coverage_gocov_xml, job: build) }
@@ -4559,6 +4685,7 @@ RSpec.describe Ci::Build do
end
before do
+ allow(build).to receive(:execute_hooks)
stub_artifacts_object_storage
end
@@ -5499,7 +5626,7 @@ RSpec.describe Ci::Build do
build.cancel_gracefully?
end
- let_it_be(:build) { create(:ci_build, pipeline: pipeline) }
+ let(:build) { create(:ci_build, pipeline: pipeline) }
it 'cannot cancel gracefully' do
expect(subject).to be false
@@ -5520,4 +5647,58 @@ RSpec.describe Ci::Build do
let!(:model) { create(:ci_build, user: create(:user)) }
let!(:parent) { model.user }
end
+
+ describe '#clone' do
+ let_it_be(:user) { FactoryBot.build(:user) }
+
+ context 'when given new job variables' do
+ context 'when the cloned build has an action' do
+ it 'applies the new job variables' do
+ build = create(:ci_build, :actionable)
+ create(:ci_job_variable, job: build, key: 'TEST_KEY', value: 'old value')
+ create(:ci_job_variable, job: build, key: 'OLD_KEY', value: 'i will not live for long')
+
+ new_build = build.clone(current_user: user, new_job_variables_attributes: [
+ { key: 'TEST_KEY', value: 'new value' },
+ { key: 'NEW_KEY', value: 'exciting new value' }
+ ])
+ new_build.save!
+
+ expect(new_build.job_variables.count).to be(2)
+ expect(new_build.job_variables.pluck(:key)).to contain_exactly('TEST_KEY', 'NEW_KEY')
+ expect(new_build.job_variables.map(&:value)).to contain_exactly('new value', 'exciting new value')
+ end
+ end
+
+ context 'when the cloned build does not have an action' do
+ it 'applies the old job variables' do
+ build = create(:ci_build)
+ create(:ci_job_variable, job: build, key: 'TEST_KEY', value: 'old value')
+
+ new_build = build.clone(current_user: user, new_job_variables_attributes: [
+ { key: 'TEST_KEY', value: 'new value' }
+ ])
+ new_build.save!
+
+ expect(new_build.job_variables.count).to be(1)
+ expect(new_build.job_variables.pluck(:key)).to contain_exactly('TEST_KEY')
+ expect(new_build.job_variables.map(&:value)).to contain_exactly('old value')
+ end
+ end
+ end
+
+ context 'when not given new job variables' do
+ it 'applies the old job variables' do
+ build = create(:ci_build)
+ create(:ci_job_variable, job: build, key: 'TEST_KEY', value: 'old value')
+
+ new_build = build.clone(current_user: user)
+ new_build.save!
+
+ expect(new_build.job_variables.count).to be(1)
+ expect(new_build.job_variables.pluck(:key)).to contain_exactly('TEST_KEY')
+ expect(new_build.job_variables.map(&:value)).to contain_exactly('old value')
+ end
+ end
+ end
end
diff --git a/spec/models/ci/daily_build_group_report_result_spec.rb b/spec/models/ci/daily_build_group_report_result_spec.rb
index 43ba4c32477..d0141a1469e 100644
--- a/spec/models/ci/daily_build_group_report_result_spec.rb
+++ b/spec/models/ci/daily_build_group_report_result_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe Ci::DailyBuildGroupReportResult do
- let(:daily_build_group_report_result) { build(:ci_daily_build_group_report_result)}
+ let(:daily_build_group_report_result) { build(:ci_daily_build_group_report_result) }
describe 'associations' do
it { is_expected.to belong_to(:last_pipeline) }
diff --git a/spec/models/ci/job_artifact_spec.rb b/spec/models/ci/job_artifact_spec.rb
index b9cac6c3f99..b996bf84529 100644
--- a/spec/models/ci/job_artifact_spec.rb
+++ b/spec/models/ci/job_artifact_spec.rb
@@ -128,6 +128,18 @@ RSpec.describe Ci::JobArtifact do
end
end
+ describe '.file_types_for_report' do
+ it 'returns the report file types for the report type' do
+ expect(described_class.file_types_for_report(:test)).to match_array(%w[junit])
+ end
+
+ context 'when given an unrecognized report type' do
+ it 'raises error' do
+ expect { described_class.file_types_for_report(:blah) }.to raise_error(KeyError, /blah/)
+ end
+ end
+ end
+
describe '.associated_file_types_for' do
using RSpec::Parameterized::TableSyntax
@@ -193,7 +205,7 @@ RSpec.describe Ci::JobArtifact do
it { is_expected.to be_truthy }
context 'when the job does have archived trace' do
- let!(:artifact) { }
+ let!(:artifact) {}
it { is_expected.to be_falsy }
end
diff --git a/spec/models/ci/pipeline_schedule_spec.rb b/spec/models/ci/pipeline_schedule_spec.rb
index 3c295fb345b..b28b61e2b39 100644
--- a/spec/models/ci/pipeline_schedule_spec.rb
+++ b/spec/models/ci/pipeline_schedule_spec.rb
@@ -73,7 +73,7 @@ RSpec.describe Ci::PipelineSchedule do
end
context 'when there are no runnable schedules' do
- let!(:pipeline_schedule) { }
+ let!(:pipeline_schedule) {}
it 'returns an empty array' do
is_expected.to be_empty
diff --git a/spec/models/ci/pipeline_spec.rb b/spec/models/ci/pipeline_spec.rb
index 6a71b2cfbed..0c28c99c113 100644
--- a/spec/models/ci/pipeline_spec.rb
+++ b/spec/models/ci/pipeline_spec.rb
@@ -844,6 +844,7 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
it 'has 8 items' do
expect(subject.size).to eq(8)
end
+
it { expect(pipeline.sha).to start_with(subject) }
end
@@ -2162,6 +2163,60 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
end
end
+ describe '#modified_paths_since' do
+ let(:project) do
+ create(:project, :custom_repo,
+ files: { 'file1.txt' => 'file 1' })
+ end
+
+ let(:user) { project.owner }
+ let(:main_branch) { project.default_branch }
+ let(:new_branch) { 'feature_x' }
+ let(:pipeline) { build(:ci_pipeline, project: project, sha: new_branch) }
+
+ subject(:modified_paths_since) { pipeline.modified_paths_since(main_branch) }
+
+ before do
+ project.repository.add_branch(user, new_branch, main_branch)
+ end
+
+ context 'when no change in the new branch' do
+ it 'returns an empty array' do
+ expect(modified_paths_since).to be_empty
+ end
+ end
+
+ context 'when adding a new file' do
+ before do
+ project.repository.create_file(user, 'file2.txt', 'file 2', message: 'Create file2.txt', branch_name: new_branch)
+ end
+
+ it 'returns the new file path' do
+ expect(modified_paths_since).to eq(['file2.txt'])
+ end
+
+ context 'and when updating an existing file' do
+ before do
+ project.repository.update_file(user, 'file1.txt', 'file 1 updated', message: 'Update file1.txt', branch_name: new_branch)
+ end
+
+ it 'returns the new and updated file paths' do
+ expect(modified_paths_since).to eq(['file1.txt', 'file2.txt'])
+ end
+ end
+ end
+
+ context 'when updating an existing file' do
+ before do
+ project.repository.update_file(user, 'file1.txt', 'file 1 updated', message: 'Update file1.txt', branch_name: new_branch)
+ end
+
+ it 'returns the updated file path' do
+ expect(modified_paths_since).to eq(['file1.txt'])
+ end
+ end
+ end
+
describe '#all_worktree_paths' do
let(:files) { { 'main.go' => '', 'mocks/mocks.go' => '' } }
let(:project) { create(:project, :custom_repo, files: files) }
@@ -2866,7 +2921,7 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
end
describe '#cancel_running' do
- subject(:latest_status) { pipeline.statuses.pluck(:status) }
+ let(:latest_status) { pipeline.statuses.pluck(:status) }
let_it_be(:pipeline) { create(:ci_empty_pipeline, :created) }
@@ -2909,6 +2964,32 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
end
end
+ context 'with bridge jobs' do
+ before do
+ create(:ci_bridge, :created, pipeline: pipeline)
+
+ pipeline.cancel_running
+ end
+
+ it 'cancels bridges' do
+ expect(pipeline.bridges.first.status).to eq 'canceled'
+ end
+ end
+
+ context 'when pipeline is not cancelable' do
+ before do
+ create(:ci_build, :canceled, stage_idx: 0, pipeline: pipeline)
+
+ pipeline.cancel_running
+ end
+
+ it 'does not send cancel signal to cancel self' do
+ expect(pipeline).not_to receive(:cancel_self_only)
+
+ pipeline.cancel_running
+ end
+ end
+
context 'preloading relations' do
let(:pipeline1) { create(:ci_empty_pipeline, :created) }
let(:pipeline2) { create(:ci_empty_pipeline, :created) }
@@ -2940,37 +3021,211 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
end
end
- context 'when the first try cannot get an exclusive lock' do
- let(:retries) { 1 }
+ shared_examples 'retries' do
+ context 'when the first try cannot get an exclusive lock' do
+ let(:retries) { 1 }
- subject(:cancel_running) { pipeline.cancel_running(retries: retries) }
+ subject { pipeline.cancel_running(retries: retries) }
- before do
- build = create(:ci_build, :running, pipeline: pipeline)
+ before do
+ create(:ci_build, :running, pipeline: pipeline)
- allow(pipeline.cancelable_statuses).to receive(:find_in_batches).and_yield([build])
+ stub_first_cancel_call_fails
+ end
+
+ it 'retries again and cancels the build' do
+ subject
+
+ expect(latest_status).to contain_exactly('canceled')
+ end
+ context 'when the retries parameter is 0' do
+ let(:retries) { 0 }
+
+ it 'raises error' do
+ expect { subject }.to raise_error(ActiveRecord::StaleObjectError)
+ end
+ end
+ end
+
+ def stub_first_cancel_call_fails
call_count = 0
- allow(build).to receive(:cancel).and_wrap_original do |original, *args|
- call_count >= retries ? raise(ActiveRecord::StaleObjectError) : original.call(*args)
- call_count += 1
+ allow_next_found_instance_of(Ci::Build) do |build|
+ allow(build).to receive(:cancel).and_wrap_original do |original, *args| # rubocop:disable RSpec/AnyInstanceOf
+ call_count >= retries ? raise(ActiveRecord::StaleObjectError) : original.call(*args)
+
+ call_count += 1
+ end
end
end
+ end
+
+ it_behaves_like 'retries'
- it 'retries again and cancels the build' do
- cancel_running
+ context 'when auto canceled' do
+ let!(:canceled_by) { create(:ci_empty_pipeline) }
- expect(latest_status).to contain_exactly('canceled')
+ before do
+ create(:ci_build, :running, pipeline: pipeline)
+
+ pipeline.cancel_running(auto_canceled_by_pipeline_id: canceled_by.id)
+ end
+
+ it 'sets auto cancel' do
+ jobs_canceled_by = pipeline.statuses.map { |s| s.auto_canceled_by.id }
+
+ expect(jobs_canceled_by).to contain_exactly(canceled_by.id)
+ expect(pipeline.auto_canceled_by.id).to eq(canceled_by.id)
end
+ end
- context 'when the retries parameter is 0' do
- let(:retries) { 0 }
+ context 'when there are child pipelines', :sidekiq_inline do
+ let_it_be(:child_pipeline) { create(:ci_empty_pipeline, :created, child_of: pipeline) }
- it 'raises error' do
- expect do
+ before do
+ project.clear_memoization(:cascade_cancel_pipelines_enabled)
+
+ pipeline.reload
+ end
+
+ context 'when cascade_to_children is true' do
+ let(:cascade_to_children) { true }
+ let(:canceled_by) { nil }
+ let(:execute_async) { true }
+
+ let(:params) do
+ {
+ cascade_to_children: cascade_to_children,
+ execute_async: execute_async
+ }.tap do |p|
+ p.merge!(auto_canceled_by_pipeline_id: canceled_by.id) if canceled_by
+ end
+ end
+
+ subject(:cancel_running) { pipeline.cancel_running(**params) }
+
+ context 'with cancelable child pipeline builds' do
+ before do
+ create(:ci_build, :created, pipeline: child_pipeline)
+ create(:ci_build, :running, pipeline: child_pipeline)
+ end
+
+ it 'cancels child builds' do
cancel_running
- end.to raise_error(ActiveRecord::StaleObjectError)
+
+ latest_status_for_child = child_pipeline.statuses.pluck(:status)
+ expect(latest_status_for_child).to eq %w(canceled canceled)
+ expect(latest_status).to eq %w(canceled)
+ end
+
+ it 'cancels bridges' do
+ create(:ci_bridge, :created, pipeline: pipeline)
+ create(:ci_bridge, :created, pipeline: child_pipeline)
+
+ cancel_running
+
+ expect(pipeline.bridges.reload.first.status).to eq 'canceled'
+ expect(child_pipeline.bridges.reload.first.status).to eq 'canceled'
+ end
+
+ context 'with nested child pipelines' do
+ let!(:nested_child_pipeline) { create(:ci_empty_pipeline, :created, child_of: child_pipeline) }
+ let!(:nested_child_pipeline_build) { create(:ci_build, :created, pipeline: nested_child_pipeline) }
+
+ it 'cancels them' do
+ cancel_running
+
+ expect(nested_child_pipeline.reload.status).to eq 'canceled'
+ expect(nested_child_pipeline_build.reload.status).to eq 'canceled'
+ end
+ end
+
+ context 'when auto canceled' do
+ let(:canceled_by) { create(:ci_empty_pipeline) }
+
+ it 'sets auto cancel' do
+ cancel_running
+
+ pipeline.reload
+
+ jobs_canceled_by_ids = pipeline.statuses.map(&:auto_canceled_by_id)
+ child_pipelines_canceled_by_ids = pipeline.child_pipelines.map(&:auto_canceled_by_id)
+ child_pipelines_jobs_canceled_by_ids = pipeline.child_pipelines.map(&:statuses).flatten.map(&:auto_canceled_by_id)
+
+ expect(jobs_canceled_by_ids).to contain_exactly(canceled_by.id)
+ expect(pipeline.auto_canceled_by_id).to eq(canceled_by.id)
+ expect(child_pipelines_canceled_by_ids).to contain_exactly(canceled_by.id)
+ expect(child_pipelines_jobs_canceled_by_ids).to contain_exactly(canceled_by.id, canceled_by.id)
+ end
+ end
+
+ context 'when execute_async is false' do
+ let(:execute_async) { false }
+
+ it 'runs sync' do
+ expect(::Ci::CancelPipelineWorker).not_to receive(:perform_async)
+
+ cancel_running
+ end
+
+ it 'cancels children' do
+ cancel_running
+
+ latest_status_for_child = child_pipeline.statuses.pluck(:status)
+ expect(latest_status_for_child).to eq %w(canceled canceled)
+ expect(latest_status).to eq %w(canceled)
+ end
+
+ context 'with nested child pipelines' do
+ let!(:nested_child_pipeline) { create(:ci_empty_pipeline, :created, child_of: child_pipeline) }
+ let!(:nested_child_pipeline_build) { create(:ci_build, :created, pipeline: nested_child_pipeline) }
+
+ it 'cancels them' do
+ cancel_running
+
+ expect(nested_child_pipeline.reload.status).to eq 'canceled'
+ expect(nested_child_pipeline_build.reload.status).to eq 'canceled'
+ end
+ end
+ end
+ end
+
+ it 'does not cancel uncancelable child pipeline builds' do
+ create(:ci_build, :failed, pipeline: child_pipeline)
+
+ cancel_running
+
+ latest_status_for_child = child_pipeline.statuses.pluck(:status)
+ expect(latest_status_for_child).to eq %w(failed)
+ expect(latest_status).to eq %w(canceled)
+ end
+ end
+
+ context 'when cascade_to_children is false' do
+ let(:cascade_to_children) { false }
+
+ subject(:cancel_running) { pipeline.cancel_running(cascade_to_children: cascade_to_children) }
+
+ it 'does not cancel cancelable child pipeline builds' do
+ create(:ci_build, :created, pipeline: child_pipeline)
+ create(:ci_build, :running, pipeline: child_pipeline)
+
+ cancel_running
+
+ latest_status_for_child = child_pipeline.statuses.order_id_desc.pluck(:status)
+ expect(latest_status_for_child).to eq %w(running created)
+ expect(latest_status).to eq %w(canceled)
+ end
+
+ it 'does not cancel uncancelable child pipeline builds' do
+ create(:ci_build, :failed, pipeline: child_pipeline)
+
+ cancel_running
+
+ latest_status_for_child = child_pipeline.statuses.pluck(:status)
+ expect(latest_status_for_child).to eq %w(failed)
+ expect(latest_status).to eq %w(canceled)
end
end
end
@@ -3352,7 +3607,7 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
end
context 'when pipeline is a triggered pipeline' do
- let!(:upstream) { create(:ci_pipeline, project: create(:project), upstream_of: pipeline)}
+ let!(:upstream) { create(:ci_pipeline, project: create(:project), upstream_of: pipeline) }
it 'returns self id' do
expect(subject).to contain_exactly(pipeline.id)
@@ -4335,24 +4590,6 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
end
end
end
-
- describe '#find_stage_by_name' do
- subject { pipeline.find_stage_by_name!(stage_name) }
-
- context 'when stage exists' do
- it { is_expected.to eq(stage) }
- end
-
- context 'when stage does not exist' do
- let(:stage_name) { 'build' }
-
- it 'raises an ActiveRecord exception' do
- expect do
- subject
- end.to raise_exception(ActiveRecord::RecordNotFound)
- end
- end
- end
end
describe '#full_error_messages' do
diff --git a/spec/models/ci/processable_spec.rb b/spec/models/ci/processable_spec.rb
index 789ae3a2ccc..127a1417d9e 100644
--- a/spec/models/ci/processable_spec.rb
+++ b/spec/models/ci/processable_spec.rb
@@ -72,7 +72,7 @@ RSpec.describe Ci::Processable do
job_artifacts_network_referee job_artifacts_dotenv
job_artifacts_cobertura needs job_artifacts_accessibility
job_artifacts_requirements job_artifacts_coverage_fuzzing
- job_artifacts_api_fuzzing terraform_state_versions].freeze
+ job_artifacts_api_fuzzing terraform_state_versions job_artifacts_cyclonedx].freeze
end
let(:ignore_accessors) do
diff --git a/spec/models/ci/runner_spec.rb b/spec/models/ci/runner_spec.rb
index 2fbfbbaf830..ae8748f8ae3 100644
--- a/spec/models/ci/runner_spec.rb
+++ b/spec/models/ci/runner_spec.rb
@@ -489,7 +489,7 @@ RSpec.describe Ci::Runner do
let!(:runner3) { create(:ci_runner, :instance, contacted_at: 1.month.ago, created_at: 2.months.ago) }
let!(:runner4) { create(:ci_runner, :instance, contacted_at: 1.month.ago, created_at: 3.months.ago) }
- it { is_expected.to eq([runner1, runner3, runner4])}
+ it { is_expected.to eq([runner1, runner3, runner4]) }
end
describe '.active' do
@@ -552,6 +552,10 @@ RSpec.describe Ci::Runner do
allow_any_instance_of(described_class).to receive(:cached_attribute).and_call_original
allow_any_instance_of(described_class).to receive(:cached_attribute)
.with(:platform).and_return("darwin")
+ allow_any_instance_of(described_class).to receive(:cached_attribute)
+ .with(:version).and_return("14.0.0")
+
+ allow(Ci::Runners::ProcessRunnerVersionUpdateWorker).to receive(:perform_async).once
end
context 'table tests' do
@@ -623,6 +627,10 @@ RSpec.describe Ci::Runner do
allow_any_instance_of(described_class).to receive(:cached_attribute).and_call_original
allow_any_instance_of(described_class).to receive(:cached_attribute)
.with(:platform).and_return("darwin")
+ allow_any_instance_of(described_class).to receive(:cached_attribute)
+ .with(:version).and_return("14.0.0")
+
+ allow(Ci::Runners::ProcessRunnerVersionUpdateWorker).to receive(:perform_async).once
end
context 'no cache value' do
@@ -693,19 +701,6 @@ RSpec.describe Ci::Runner do
it { is_expected.to eq([runner1]) }
end
- describe '#tick_runner_queue' do
- it 'sticks the runner to the primary and calls the original method' do
- runner = create(:ci_runner)
-
- expect(described_class.sticking).to receive(:stick)
- .with(:runner, runner.id)
-
- expect(Gitlab::Workhorse).to receive(:set_key_and_notify)
-
- runner.tick_runner_queue
- end
- end
-
describe '#matches_build?' do
using RSpec::Parameterized::TableSyntax
@@ -866,7 +861,7 @@ RSpec.describe Ci::Runner do
describe '#status' do
let(:runner) { build(:ci_runner, :instance, created_at: 4.months.ago) }
- let(:legacy_mode) { }
+ let(:legacy_mode) {}
subject { runner.status(legacy_mode) }
@@ -989,6 +984,16 @@ RSpec.describe Ci::Runner do
it 'returns a new last_update value' do
expect(runner.tick_runner_queue).not_to be_empty
end
+
+ it 'sticks the runner to the primary and calls the original method' do
+ runner = create(:ci_runner)
+
+ expect(described_class.sticking).to receive(:stick).with(:runner, runner.id)
+
+ expect(Gitlab::Workhorse).to receive(:set_key_and_notify)
+
+ runner.tick_runner_queue
+ end
end
describe '#ensure_runner_queue_value' do
@@ -1055,14 +1060,19 @@ RSpec.describe Ci::Runner do
it 'updates cache' do
expect_redis_update
+ expect(Ci::Runners::ProcessRunnerVersionUpdateWorker).not_to receive(:perform_async)
heartbeat
+
+ expect(runner.runner_version).to be_nil
end
end
context 'when database was not updated recently' do
before do
runner.contacted_at = 2.hours.ago
+
+ allow(Ci::Runners::ProcessRunnerVersionUpdateWorker).to receive(:perform_async)
end
context 'with invalid runner' do
@@ -1075,12 +1085,25 @@ RSpec.describe Ci::Runner do
expect_redis_update
does_db_update
+
+ expect(Ci::Runners::ProcessRunnerVersionUpdateWorker).to have_received(:perform_async).once
+ end
+ end
+
+ context 'with unchanged runner version' do
+ let(:runner) { create(:ci_runner, version: version) }
+
+ it 'does not schedule ci_runner_versions update' do
+ heartbeat
+
+ expect(Ci::Runners::ProcessRunnerVersionUpdateWorker).not_to have_received(:perform_async)
end
end
it 'updates redis cache and database' do
expect_redis_update
does_db_update
+ expect(Ci::Runners::ProcessRunnerVersionUpdateWorker).to have_received(:perform_async).once
end
%w(custom shell docker docker-windows docker-ssh ssh parallels virtualbox docker+machine docker-ssh+machine kubernetes some-unknown-type).each do |executor|
@@ -1795,11 +1818,21 @@ RSpec.describe Ci::Runner do
end
context ':recommended' do
- let(:upgrade_status) { :recommended}
+ let(:upgrade_status) { :recommended }
it 'returns runners whose version is assigned :recommended' do
is_expected.to contain_exactly(runner_14_1_0)
end
end
+
+ describe 'composed with other scopes' do
+ subject { described_class.active(false).with_upgrade_status(:available) }
+
+ let(:inactive_runner_14_0_0) { create(:ci_runner, version: '14.0.0', active: false) }
+
+ it 'returns runner matching the composed scope' do
+ is_expected.to contain_exactly(inactive_runner_14_0_0)
+ end
+ end
end
end
diff --git a/spec/models/ci/runner_version_spec.rb b/spec/models/ci/runner_version_spec.rb
index d3395942a39..7a4b2e8f21e 100644
--- a/spec/models/ci/runner_version_spec.rb
+++ b/spec/models/ci/runner_version_spec.rb
@@ -27,16 +27,11 @@ RSpec.describe Ci::RunnerVersion do
create(:ci_runner_version, version: 'abc456', status: :available)
end
- let_it_be(:runner_version_unknown) do
- create(:ci_runner_version, version: 'abc567', status: :unknown)
- end
-
- it 'contains any runner version that is not already recommended' do
+ it 'contains any valid or unprocessed runner version that is not already recommended' do
is_expected.to match_array([
runner_version_nil,
runner_version_not_available,
- runner_version_available,
- runner_version_unknown
+ runner_version_available
])
end
end
diff --git a/spec/models/ci/secure_file_spec.rb b/spec/models/ci/secure_file_spec.rb
index a3f1c7b7ef7..e47efff5dfd 100644
--- a/spec/models/ci/secure_file_spec.rb
+++ b/spec/models/ci/secure_file_spec.rb
@@ -26,6 +26,7 @@ RSpec.describe Ci::SecureFile do
it { is_expected.to validate_presence_of(:file_store) }
it { is_expected.to validate_presence_of(:name) }
it { is_expected.to validate_presence_of(:project_id) }
+
context 'unique filename' do
let_it_be(:project1) { create(:project) }
diff --git a/spec/models/clusters/cluster_spec.rb b/spec/models/clusters/cluster_spec.rb
index 65ead01a2bd..73cd7bb9075 100644
--- a/spec/models/clusters/cluster_spec.rb
+++ b/spec/models/clusters/cluster_spec.rb
@@ -1278,14 +1278,14 @@ RSpec.describe Clusters::Cluster, :use_clean_rails_memory_store_caching do
context 'generic timeout' do
let(:connection_status) { { connection_status: :unreachable, connection_error: :http_error } }
- let(:error_message) { 'Timed out connecting to server'}
+ let(:error_message) { 'Timed out connecting to server' }
it { is_expected.to eq(**connection_status, **expected_nodes) }
end
context 'gateway timeout' do
let(:connection_status) { { connection_status: :unreachable, connection_error: :http_error } }
- let(:error_message) { '504 Gateway Timeout for GET https://kubernetes.example.com/api/v1'}
+ let(:error_message) { '504 Gateway Timeout for GET https://kubernetes.example.com/api/v1' }
it { is_expected.to eq(**connection_status, **expected_nodes) }
end
diff --git a/spec/models/commit_signatures/ssh_signature_spec.rb b/spec/models/commit_signatures/ssh_signature_spec.rb
index ac4496e9d8c..64d95fe3a71 100644
--- a/spec/models/commit_signatures/ssh_signature_spec.rb
+++ b/spec/models/commit_signatures/ssh_signature_spec.rb
@@ -22,7 +22,7 @@ RSpec.describe CommitSignatures::SshSignature do
it_behaves_like 'commit signature'
describe 'associations' do
- it { is_expected.to belong_to(:key).required }
+ it { is_expected.to belong_to(:key).optional }
end
describe '.by_commit_sha scope' do
diff --git a/spec/models/commit_status_spec.rb b/spec/models/commit_status_spec.rb
index 3cccc41a892..78d4d9de84e 100644
--- a/spec/models/commit_status_spec.rb
+++ b/spec/models/commit_status_spec.rb
@@ -747,7 +747,7 @@ RSpec.describe CommitStatus do
end
context 'when failure_reason is nil' do
- let(:reason) { }
+ let(:reason) {}
let(:failure_reason) { 'unknown_failure' }
it { is_expected.to be_unknown_failure }
diff --git a/spec/models/concerns/bulk_insert_safe_spec.rb b/spec/models/concerns/bulk_insert_safe_spec.rb
index e6b197f34ca..569dc3a3a3e 100644
--- a/spec/models/concerns/bulk_insert_safe_spec.rb
+++ b/spec/models/concerns/bulk_insert_safe_spec.rb
@@ -170,7 +170,9 @@ RSpec.describe BulkInsertSafe do
all_items = bulk_insert_item_class.valid_list(10) + bulk_insert_item_class.invalid_list(10)
expect do
- bulk_insert_item_class.bulk_insert!(all_items, batch_size: 2) rescue nil
+ bulk_insert_item_class.bulk_insert!(all_items, batch_size: 2)
+ rescue StandardError
+ nil
end.not_to change { bulk_insert_item_class.count }
end
diff --git a/spec/models/concerns/chronic_duration_attribute_spec.rb b/spec/models/concerns/chronic_duration_attribute_spec.rb
index 00e28e19bd5..61b86455840 100644
--- a/spec/models/concerns/chronic_duration_attribute_spec.rb
+++ b/spec/models/concerns/chronic_duration_attribute_spec.rb
@@ -95,8 +95,8 @@ end
RSpec.describe 'ChronicDurationAttribute' do
context 'when default value is not set' do
- let(:source_field) {:maximum_timeout}
- let(:virtual_field) {:maximum_timeout_human_readable}
+ let(:source_field) { :maximum_timeout }
+ let(:virtual_field) { :maximum_timeout_human_readable }
let(:default_value) { nil }
subject { create(:ci_runner) }
@@ -106,8 +106,8 @@ RSpec.describe 'ChronicDurationAttribute' do
end
context 'when default value is set' do
- let(:source_field) {:build_timeout}
- let(:virtual_field) {:build_timeout_human_readable}
+ let(:source_field) { :build_timeout }
+ let(:virtual_field) { :build_timeout_human_readable }
let(:default_value) { 3600 }
subject { create(:project) }
@@ -118,8 +118,8 @@ RSpec.describe 'ChronicDurationAttribute' do
end
RSpec.describe 'ChronicDurationAttribute - reader' do
- let(:source_field) {:timeout}
- let(:virtual_field) {:timeout_human_readable}
+ let(:source_field) { :timeout }
+ let(:virtual_field) { :timeout_human_readable }
subject { create(:ci_build).ensure_metadata }
diff --git a/spec/models/concerns/ci/artifactable_spec.rb b/spec/models/concerns/ci/artifactable_spec.rb
index 6af244a5a0f..64691165e21 100644
--- a/spec/models/concerns/ci/artifactable_spec.rb
+++ b/spec/models/concerns/ci/artifactable_spec.rb
@@ -46,8 +46,30 @@ RSpec.describe Ci::Artifactable do
end
end
+ context 'when file format is zip' do
+ context 'when artifact contains one file' do
+ let(:artifact) { build(:ci_job_artifact, :zip_with_single_file) }
+
+ it 'iterates blob once' do
+ expect { |b| artifact.each_blob(&b) }.to yield_control.once
+ end
+ end
+
+ context 'when artifact contains two files' do
+ let(:artifact) { build(:ci_job_artifact, :zip_with_multiple_files) }
+
+ it 'iterates blob two times' do
+ expect { |b| artifact.each_blob(&b) }.to yield_control.exactly(2).times
+ end
+ end
+ end
+
context 'when there are no adapters for the file format' do
- let(:artifact) { build(:ci_job_artifact, :junit, file_format: :zip) }
+ let(:artifact) { build(:ci_job_artifact, :junit) }
+
+ before do
+ allow(artifact).to receive(:file_format).and_return(:unknown)
+ end
it 'raises an error' do
expect { |b| artifact.each_blob(&b) }.to raise_error(described_class::NotSupportedAdapterError)
diff --git a/spec/models/concerns/counter_attribute_spec.rb b/spec/models/concerns/counter_attribute_spec.rb
index a19fbae3cfb..8d32ef14f47 100644
--- a/spec/models/concerns/counter_attribute_spec.rb
+++ b/spec/models/concerns/counter_attribute_spec.rb
@@ -13,7 +13,7 @@ RSpec.describe CounterAttribute, :counter_attribute, :clean_gitlab_redis_shared_
end
describe 'after_flush callbacks' do
- let(:attribute) { model.class.counter_attributes.first}
+ let(:attribute) { model.class.counter_attributes.first }
subject { model.flush_increments_to_database!(attribute) }
diff --git a/spec/models/concerns/cross_database_modification_spec.rb b/spec/models/concerns/cross_database_modification_spec.rb
index 72544536953..c3831b654cf 100644
--- a/spec/models/concerns/cross_database_modification_spec.rb
+++ b/spec/models/concerns/cross_database_modification_spec.rb
@@ -4,38 +4,6 @@ require 'spec_helper'
RSpec.describe CrossDatabaseModification do
describe '.transaction' do
- context 'feature flag disabled' do
- before do
- stub_feature_flags(track_gitlab_schema_in_current_transaction: false)
- end
-
- it 'does not add to gitlab_transactions_stack' do
- ApplicationRecord.transaction do
- expect(ApplicationRecord.gitlab_transactions_stack).to be_empty
-
- Project.first
- end
-
- expect(ApplicationRecord.gitlab_transactions_stack).to be_empty
- end
- end
-
- context 'feature flag is not yet setup' do
- before do
- allow(Feature::FlipperFeature).to receive(:table_exists?).and_raise(ActiveRecord::NoDatabaseError)
- end
-
- it 'does not add to gitlab_transactions_stack' do
- ApplicationRecord.transaction do
- expect(ApplicationRecord.gitlab_transactions_stack).to be_empty
-
- Project.first
- end
-
- expect(ApplicationRecord.gitlab_transactions_stack).to be_empty
- end
- end
-
it 'adds the current gitlab schema to gitlab_transactions_stack', :aggregate_failures do
ApplicationRecord.transaction do
expect(ApplicationRecord.gitlab_transactions_stack).to contain_exactly(:gitlab_main)
diff --git a/spec/models/concerns/database_event_tracking_spec.rb b/spec/models/concerns/database_event_tracking_spec.rb
new file mode 100644
index 00000000000..976462b4174
--- /dev/null
+++ b/spec/models/concerns/database_event_tracking_spec.rb
@@ -0,0 +1,69 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe DatabaseEventTracking, :snowplow do
+ let(:test_class) do
+ Class.new(ActiveRecord::Base) do
+ include DatabaseEventTracking
+
+ self.table_name = 'application_setting_terms'
+
+ self::SNOWPLOW_ATTRIBUTES = %w[id].freeze # rubocop:disable RSpec/LeakyConstantDeclaration
+ end
+ end
+
+ subject(:create_test_class_record) { test_class.create!(id: 1, terms: "") }
+
+ context 'if the event emitter fails' do
+ before do
+ allow(Gitlab::Tracking).to receive(:event).and_raise(StandardError) # rubocop:disable RSpec/ExpectGitlabTracking
+ end
+
+ it 'tracks the exception' do
+ expect(Gitlab::ErrorTracking).to receive(:track_and_raise_for_dev_exception)
+
+ create_test_class_record
+ end
+ end
+
+ context 'if product_intelligence_database_event_tracking FF is off' do
+ before do
+ stub_feature_flags(product_intelligence_database_event_tracking: false)
+ end
+
+ it 'does not track the event' do
+ create_test_class_record
+
+ expect_no_snowplow_event
+ end
+ end
+
+ describe 'event tracking' do
+ let(:category) { test_class.to_s }
+ let(:event) { 'database_event' }
+
+ it 'when created' do
+ create_test_class_record
+
+ expect_snowplow_event(category: category, action: "#{event}_create", label: 'application_setting_terms',
+ property: 'create', namespace: nil, "id" => 1)
+ end
+
+ it 'when updated' do
+ create_test_class_record
+ test_class.first.update!(id: 3)
+
+ expect_snowplow_event(category: category, action: "#{event}_update", label: 'application_setting_terms',
+ property: 'update', namespace: nil, "id" => 3)
+ end
+
+ it 'when destroyed' do
+ create_test_class_record
+ test_class.first.destroy!
+
+ expect_snowplow_event(category: category, action: "#{event}_destroy", label: 'application_setting_terms',
+ property: 'destroy', namespace: nil, "id" => 1)
+ end
+ end
+end
diff --git a/spec/models/concerns/expirable_spec.rb b/spec/models/concerns/expirable_spec.rb
index 5eb6530881e..50dfb138ac9 100644
--- a/spec/models/concerns/expirable_spec.rb
+++ b/spec/models/concerns/expirable_spec.rb
@@ -16,6 +16,11 @@ RSpec.describe Expirable do
it { expect(ProjectMember.expired).to match_array([expired]) }
end
+ describe '.not_expired' do
+ it { expect(ProjectMember.not_expired).to include(no_expire, expire_later) }
+ it { expect(ProjectMember.not_expired).not_to include(expired) }
+ end
+
describe '#expired?' do
it { expect(no_expire.expired?).to eq(false) }
it { expect(expire_later.expired?).to eq(false) }
diff --git a/spec/models/concerns/issuable_spec.rb b/spec/models/concerns/issuable_spec.rb
index 87821de3cf5..6763cc904b4 100644
--- a/spec/models/concerns/issuable_spec.rb
+++ b/spec/models/concerns/issuable_spec.rb
@@ -569,6 +569,27 @@ RSpec.describe Issuable do
end
end
+ context 'merge_request update reviewers' do
+ let(:merge_request) { create(:merge_request) }
+ let(:user2) { create(:user) }
+
+ before do
+ merge_request.update!(reviewers: [user])
+ merge_request.update!(reviewers: [user, user2])
+ expect(Gitlab::DataBuilder::Issuable)
+ .to receive(:new).with(merge_request).and_return(builder)
+ end
+
+ it 'delegates to Gitlab::DataBuilder::Issuable#build' do
+ expect(builder).to receive(:build).with(
+ user: user,
+ changes: hash_including(
+ 'reviewers' => [[user.hook_attrs], [user.hook_attrs, user2.hook_attrs]]
+ ))
+ merge_request.to_hook_data(user, old_associations: { reviewers: [user] })
+ end
+ end
+
context 'incident severity is updated' do
let(:issue) { create(:incident) }
diff --git a/spec/models/concerns/nullify_if_blank_spec.rb b/spec/models/concerns/nullify_if_blank_spec.rb
index 2d1bdba39dd..b0e229f4c91 100644
--- a/spec/models/concerns/nullify_if_blank_spec.rb
+++ b/spec/models/concerns/nullify_if_blank_spec.rb
@@ -31,7 +31,7 @@ RSpec.describe NullifyIfBlank do
context 'attribute is nil' do
let(:name) { nil }
- it { is_expected.to be_nil}
+ it { is_expected.to be_nil }
end
context 'attribute is not blank' do
diff --git a/spec/models/concerns/participable_spec.rb b/spec/models/concerns/participable_spec.rb
index b92c7c52f0b..f7f68cb38d8 100644
--- a/spec/models/concerns/participable_spec.rb
+++ b/spec/models/concerns/participable_spec.rb
@@ -124,6 +124,7 @@ RSpec.describe Participable do
end
let(:readable) { true }
+ let(:project) { build(:project, :public) }
it 'returns the list of participants' do
model.participant(:foo)
@@ -132,7 +133,6 @@ RSpec.describe Participable do
user1 = build(:user)
user2 = build(:user)
user3 = build(:user)
- project = build(:project, :public)
instance = model.new
allow(instance).to receive_message_chain(:model_name, :element) { 'class' }
@@ -155,7 +155,6 @@ RSpec.describe Participable do
instance = model.new
user1 = build(:user)
user2 = build(:user)
- project = build(:project, :public)
allow(instance).to receive_message_chain(:model_name, :element) { 'class' }
allow(instance).to receive(:bar).and_return(user2)
@@ -164,6 +163,29 @@ RSpec.describe Participable do
expect(instance.visible_participants(user1)).to be_empty
end
end
+
+ context 'with multiple system notes from the same author and mentioned_users' do
+ let!(:user1) { create(:user) }
+ let!(:user2) { create(:user) }
+
+ it 'skips expensive checks if the author is already in the participants list' do
+ model.participant(:notes)
+
+ instance = model.new
+ note1 = create(:system_note, author: user1)
+ note2 = create(:system_note, author: user1) # only skip system notes with no mentioned users
+ note3 = create(:system_note, author: user1, note: "assigned to #{user2.to_reference}")
+ note4 = create(:note, author: user2)
+
+ allow(instance).to receive(:project).and_return(project)
+ allow(instance).to receive_message_chain(:model_name, :element) { 'class' }
+ allow(instance).to receive(:notes).and_return([note1, note2, note3, note4])
+
+ allow(Ability).to receive(:allowed?).with(anything, :read_project, anything).and_return(true)
+ allow(Ability).to receive(:allowed?).with(anything, :read_note, anything).exactly(3).times.and_return(true)
+ expect(instance.visible_participants(user1)).to match_array [user1, user2]
+ end
+ end
end
describe '#participant?' do
diff --git a/spec/models/concerns/project_features_compatibility_spec.rb b/spec/models/concerns/project_features_compatibility_spec.rb
index f2dc8464e86..b49b9ce8a2a 100644
--- a/spec/models/concerns/project_features_compatibility_spec.rb
+++ b/spec/models/concerns/project_features_compatibility_spec.rb
@@ -5,7 +5,11 @@ require 'spec_helper'
RSpec.describe ProjectFeaturesCompatibility do
let(:project) { create(:project) }
let(:features_enabled) { %w(issues wiki builds merge_requests snippets security_and_compliance) }
- let(:features) { features_enabled + %w(repository pages operations container_registry package_registry) }
+ let(:features) do
+ features_enabled + %w(
+ repository pages operations container_registry package_registry environments feature_flags releases
+ )
+ end
# We had issues_enabled, snippets_enabled, builds_enabled, merge_requests_enabled and issues_enabled fields on projects table
# All those fields got moved to a new table called project_feature and are now integers instead of booleans
diff --git a/spec/models/concerns/reactive_caching_spec.rb b/spec/models/concerns/reactive_caching_spec.rb
index 5468699f9dd..cb9bb676ede 100644
--- a/spec/models/concerns/reactive_caching_spec.rb
+++ b/spec/models/concerns/reactive_caching_spec.rb
@@ -320,7 +320,7 @@ RSpec.describe ReactiveCaching, :use_clean_rails_memory_store_caching do
stub_reactive_cache(instance, "preexisting")
end
- let(:calculation) { -> { raise "foo"} }
+ let(:calculation) { -> { raise "foo" } }
it 'leaves the cache untouched' do
expect { go! }.to raise_error("foo")
diff --git a/spec/models/concerns/token_authenticatable_spec.rb b/spec/models/concerns/token_authenticatable_spec.rb
index a2ce02f4661..3f6bbe795cc 100644
--- a/spec/models/concerns/token_authenticatable_spec.rb
+++ b/spec/models/concerns/token_authenticatable_spec.rb
@@ -102,7 +102,7 @@ RSpec.describe ApplicationSetting, 'TokenAuthenticatable' do
subject { described_class.send(:add_authentication_token_field, :runners_registration_token) }
it 'raises error' do
- expect {subject}.to raise_error(ArgumentError)
+ expect { subject }.to raise_error(ArgumentError)
end
end
end
@@ -126,7 +126,7 @@ RSpec.describe PersonalAccessToken, 'TokenAuthenticatable' do
end
end
- let(:token_value) { 'token' }
+ let(:token_value) { Devise.friendly_token }
let(:token_digest) { Gitlab::CryptoHelper.sha256(token_value) }
let(:user) { create(:user) }
let(:personal_access_token) do
@@ -442,7 +442,7 @@ RSpec.shared_examples 'prefixed token rotation' do
context 'token is not set' do
it 'generates a new token' do
- expect(subject).to match(/^#{RunnersTokenPrefixable::RUNNERS_TOKEN_PREFIX}/)
+ expect(subject).to match(/^#{RunnersTokenPrefixable::RUNNERS_TOKEN_PREFIX}/o)
expect(instance).not_to be_persisted
end
end
@@ -453,7 +453,7 @@ RSpec.shared_examples 'prefixed token rotation' do
end
it 'generates a new token' do
- expect(subject).to match(/^#{RunnersTokenPrefixable::RUNNERS_TOKEN_PREFIX}/)
+ expect(subject).to match(/^#{RunnersTokenPrefixable::RUNNERS_TOKEN_PREFIX}/o)
expect(instance).not_to be_persisted
end
end
@@ -475,7 +475,7 @@ RSpec.shared_examples 'prefixed token rotation' do
context 'token is not set' do
it 'generates a new token' do
- expect(subject).to match(/^#{RunnersTokenPrefixable::RUNNERS_TOKEN_PREFIX}/)
+ expect(subject).to match(/^#{RunnersTokenPrefixable::RUNNERS_TOKEN_PREFIX}/o)
expect(instance).to be_persisted
end
end
@@ -486,7 +486,7 @@ RSpec.shared_examples 'prefixed token rotation' do
end
it 'generates a new token' do
- expect(subject).to match(/^#{RunnersTokenPrefixable::RUNNERS_TOKEN_PREFIX}/)
+ expect(subject).to match(/^#{RunnersTokenPrefixable::RUNNERS_TOKEN_PREFIX}/o)
expect(instance).to be_persisted
end
end
diff --git a/spec/models/container_expiration_policy_spec.rb b/spec/models/container_expiration_policy_spec.rb
index 191913ed454..b88eddf19dc 100644
--- a/spec/models/container_expiration_policy_spec.rb
+++ b/spec/models/container_expiration_policy_spec.rb
@@ -131,7 +131,7 @@ RSpec.describe ContainerExpirationPolicy, type: :model do
end
context 'when there are no runnable schedules' do
- let!(:policy) { }
+ let!(:policy) {}
it 'returns an empty array' do
is_expected.to be_empty
diff --git a/spec/models/container_repository_spec.rb b/spec/models/container_repository_spec.rb
index e35788b1848..a4329993e91 100644
--- a/spec/models/container_repository_spec.rb
+++ b/spec/models/container_repository_spec.rb
@@ -525,6 +525,162 @@ RSpec.describe ContainerRepository, :aggregate_failures do
end
end
+ describe '#each_tags_page' do
+ let(:page_size) { 100 }
+
+ shared_examples 'iterating through a page' do |expected_tags: true|
+ it 'iterates through one page' do
+ expect(repository.gitlab_api_client).to receive(:tags)
+ .with(repository.path, page_size: page_size, last: nil)
+ .and_return(client_response)
+ expect { |b| repository.each_tags_page(page_size: page_size, &b) }
+ .to yield_with_args(expected_tags ? expected_tags_from(client_response_tags) : [])
+ end
+ end
+
+ context 'with an empty page' do
+ let(:client_response) { { pagination: {}, response_body: [] } }
+
+ it_behaves_like 'iterating through a page', expected_tags: false
+ end
+
+ context 'with one page' do
+ let(:client_response) { { pagination: {}, response_body: client_response_tags } }
+ let(:client_response_tags) do
+ [
+ {
+ 'name' => '0.1.0',
+ 'created_at' => '2022-06-07T12:10:12.412+00:00'
+ },
+ {
+ 'name' => 'latest',
+ 'created_at' => '2022-06-07T12:11:13.633+00:00'
+ }
+ ]
+ end
+
+ context 'with a nil created_at' do
+ let(:client_response_tags) { ['name' => '0.1.0', 'created_at' => nil] }
+
+ it_behaves_like 'iterating through a page'
+ end
+
+ context 'with an invalid created_at' do
+ let(:client_response_tags) { ['name' => '0.1.0', 'created_at' => 'not_a_timestamp'] }
+
+ it_behaves_like 'iterating through a page'
+ end
+ end
+
+ context 'with two pages' do
+ let(:client_response1) { { pagination: { next: { uri: URI('http://localhost/next?last=latest') } }, response_body: client_response_tags1 } }
+ let(:client_response_tags1) do
+ [
+ {
+ 'name' => '0.1.0',
+ 'created_at' => '2022-06-07T12:10:12.412+00:00'
+ },
+ {
+ 'name' => 'latest',
+ 'created_at' => '2022-06-07T12:11:13.633+00:00'
+ }
+ ]
+ end
+
+ let(:client_response2) { { pagination: {}, response_body: client_response_tags2 } }
+ let(:client_response_tags2) do
+ [
+ {
+ 'name' => '1.2.3',
+ 'created_at' => '2022-06-10T12:10:15.412+00:00'
+ },
+ {
+ 'name' => '2.3.4',
+ 'created_at' => '2022-06-11T12:11:17.633+00:00'
+ }
+ ]
+ end
+
+ it 'iterates through two pages' do
+ expect(repository.gitlab_api_client).to receive(:tags)
+ .with(repository.path, page_size: page_size, last: nil)
+ .and_return(client_response1)
+ expect(repository.gitlab_api_client).to receive(:tags)
+ .with(repository.path, page_size: page_size, last: 'latest')
+ .and_return(client_response2)
+ expect { |b| repository.each_tags_page(page_size: page_size, &b) }
+ .to yield_successive_args(expected_tags_from(client_response_tags1), expected_tags_from(client_response_tags2))
+ end
+ end
+
+ context 'when max pages is reached' do
+ before do
+ stub_const('ContainerRepository::MAX_TAGS_PAGES', 0)
+ end
+
+ it 'raises an error' do
+ expect { repository.each_tags_page(page_size: page_size) {} }
+ .to raise_error(StandardError, 'too many pages requested')
+ end
+ end
+
+ context 'without a block set' do
+ it 'raises an Argument error' do
+ expect { repository.each_tags_page(page_size: page_size) }.to raise_error(ArgumentError, 'block not given')
+ end
+ end
+
+ context 'without a page size set' do
+ let(:client_response) { { pagination: {}, response_body: [] } }
+
+ it 'uses a default size' do
+ expect(repository.gitlab_api_client).to receive(:tags)
+ .with(repository.path, page_size: 100, last: nil)
+ .and_return(client_response)
+ expect { |b| repository.each_tags_page(&b) }.to yield_with_args([])
+ end
+ end
+
+ context 'with an empty client response' do
+ let(:client_response) { {} }
+
+ it 'breaks the loop' do
+ expect(repository.gitlab_api_client).to receive(:tags)
+ .with(repository.path, page_size: page_size, last: nil)
+ .and_return(client_response)
+ expect { |b| repository.each_tags_page(page_size: page_size, &b) }.not_to yield_control
+ end
+ end
+
+ context 'with a nil page' do
+ let(:client_response) { { pagination: {}, response_body: nil } }
+
+ it_behaves_like 'iterating through a page', expected_tags: false
+ end
+
+ context 'calling on a non-migrated repository' do
+ before do
+ repository.update!(created_at: described_class::MIGRATION_PHASE_1_ENDED_AT - 3.days)
+ end
+
+ it 'raises an Argument error' do
+ expect { repository.each_tags_page }.to raise_error(ArgumentError, 'not a migrated repository')
+ end
+ end
+
+ def expected_tags_from(client_tags)
+ client_tags.map do |tag|
+ created_at =
+ begin
+ DateTime.iso8601(tag['created_at'])
+ rescue ArgumentError
+ nil
+ end
+ an_object_having_attributes(name: tag['name'], created_at: created_at)
+ end
+ end
+ end
+
describe '#tags_count' do
it 'returns the count of tags' do
expect(repository.tags_count).to eq(1)
@@ -1195,7 +1351,7 @@ RSpec.describe ContainerRepository, :aggregate_failures do
described_class::MIGRATION_STATES.each do |state|
context "when in #{state} migration_state" do
- let(:container_repository) { create(:container_repository, state.to_sym)}
+ let(:container_repository) { create(:container_repository, state.to_sym) }
it { is_expected.to eq(state == 'importing' || state == 'pre_importing') }
end
@@ -1207,7 +1363,7 @@ RSpec.describe ContainerRepository, :aggregate_failures do
described_class::MIGRATION_STATES.each do |state|
context "when in #{state} migration_state" do
- let(:container_repository) { create(:container_repository, state.to_sym)}
+ let(:container_repository) { create(:container_repository, state.to_sym) }
it { is_expected.to eq(state == 'importing') }
end
@@ -1219,7 +1375,7 @@ RSpec.describe ContainerRepository, :aggregate_failures do
described_class::MIGRATION_STATES.each do |state|
context "when in #{state} migration_state" do
- let(:container_repository) { create(:container_repository, state.to_sym)}
+ let(:container_repository) { create(:container_repository, state.to_sym) }
it { is_expected.to eq(state == 'pre_importing') }
end
@@ -1348,6 +1504,28 @@ RSpec.describe ContainerRepository, :aggregate_failures do
end
end
+ describe '#migrated?' do
+ subject { repository.migrated? }
+
+ it { is_expected.to eq(true) }
+
+ context 'with a created_at older than phase 1 ends' do
+ before do
+ repository.update!(created_at: described_class::MIGRATION_PHASE_1_ENDED_AT - 3.days)
+ end
+
+ it { is_expected.to eq(false) }
+
+ context 'with migration state set to import_done' do
+ before do
+ repository.update!(migration_state: 'import_done')
+ end
+
+ it { is_expected.to eq(true) }
+ end
+ end
+ end
+
context 'with repositories' do
let_it_be_with_reload(:repository) { create(:container_repository, :cleanup_unscheduled) }
let_it_be(:other_repository) { create(:container_repository, :cleanup_unscheduled) }
diff --git a/spec/models/customer_relations/contact_spec.rb b/spec/models/customer_relations/contact_spec.rb
index f91546f5240..487af404a7c 100644
--- a/spec/models/customer_relations/contact_spec.rb
+++ b/spec/models/customer_relations/contact_spec.rb
@@ -226,15 +226,58 @@ RSpec.describe CustomerRelations::Contact, type: :model do
end
end
- describe '.sort_by_name' do
+ describe '.counts_by_state' do
+ before do
+ create_list(:contact, 3, group: group)
+ create_list(:contact, 2, group: group, state: 'inactive')
+ end
+
+ it 'returns correct contact counts' do
+ counts = group.contacts.counts_by_state
+
+ expect(counts['active']).to be(3)
+ expect(counts['inactive']).to be(2)
+ end
+ end
+
+ describe 'sorting' do
+ let_it_be(:organization_a) { create(:organization, name: 'a') }
+ let_it_be(:organization_b) { create(:organization, name: 'b') }
let_it_be(:contact_a) { create(:contact, group: group, first_name: "c", last_name: "d") }
- let_it_be(:contact_b) { create(:contact, group: group, first_name: "a", last_name: "b") }
- let_it_be(:contact_c) { create(:contact, group: group, first_name: "e", last_name: "d") }
+ let_it_be(:contact_b) do
+ create(:contact,
+ group: group,
+ first_name: "a",
+ last_name: "b",
+ phone: "123",
+ organization: organization_a)
+ end
- context 'when sorting the contacts' do
- it 'sorts them by last name then first name in ascendent order' do
+ let_it_be(:contact_c) do
+ create(:contact,
+ group: group,
+ first_name: "e",
+ last_name: "d",
+ phone: "456",
+ organization: organization_b)
+ end
+
+ describe '.sort_by_name' do
+ it 'sorts them by last name then first name in ascending order' do
expect(group.contacts.sort_by_name).to eq([contact_b, contact_a, contact_c])
end
end
+
+ describe '.sort_by_organization' do
+ it 'sorts them by organization in descending order' do
+ expect(group.contacts.sort_by_organization(:desc)).to eq([contact_c, contact_b, contact_a])
+ end
+ end
+
+ describe '.sort_by_field' do
+ it 'sorts them by phone in ascending order' do
+ expect(group.contacts.sort_by_field('phone', :asc)).to eq([contact_b, contact_c, contact_a])
+ end
+ end
end
end
diff --git a/spec/models/customer_relations/contact_state_counts_spec.rb b/spec/models/customer_relations/contact_state_counts_spec.rb
new file mode 100644
index 00000000000..a19f6f08489
--- /dev/null
+++ b/spec/models/customer_relations/contact_state_counts_spec.rb
@@ -0,0 +1,60 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe CustomerRelations::ContactStateCounts do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:group) { create(:group, :crm_enabled) }
+
+ let(:counter) { described_class.new(user, group, params) }
+ let(:params) { {} }
+
+ before_all do
+ group.add_reporter(user)
+ create(:contact, group: group, first_name: 'filter')
+ create(:contact, group: group, last_name: 'filter')
+ create(:contact, group: group)
+ create(:contact, group: group, state: 'inactive', email: 'filter@example.com')
+ create(:contact, group: group, state: 'inactive')
+ end
+
+ describe '.declarative_policy_class' do
+ subject { described_class.declarative_policy_class }
+
+ it { is_expected.to eq('CustomerRelations::ContactPolicy') }
+ end
+
+ describe '#all' do
+ it 'returns the total number of contacts' do
+ expect(counter.all).to be(5)
+ end
+ end
+
+ describe '#active' do
+ it 'returns the number of active contacts' do
+ expect(counter.active).to be(3)
+ end
+ end
+
+ describe '#inactive' do
+ it 'returns the number of inactive contacts' do
+ expect(counter.inactive).to be(2)
+ end
+ end
+
+ describe 'when filtered' do
+ let(:params) { { search: 'filter' } }
+
+ it '#all returns the number of contacts with a filter' do
+ expect(counter.all).to be(3)
+ end
+
+ it '#active returns the number of active contacts with a filter' do
+ expect(counter.active).to be(2)
+ end
+
+ it '#inactive returns the number of inactive contacts with a filter' do
+ expect(counter.inactive).to be(1)
+ end
+ end
+end
diff --git a/spec/models/data_list_spec.rb b/spec/models/data_list_spec.rb
index 67db2730a78..6e01f4786ba 100644
--- a/spec/models/data_list_spec.rb
+++ b/spec/models/data_list_spec.rb
@@ -8,7 +8,7 @@ RSpec.describe DataList do
let(:zentao_integration) { create(:zentao_integration) }
let(:cases) do
[
- [jira_integration, 'Integrations::JiraTrackerData', 'service_id'],
+ [jira_integration, 'Integrations::JiraTrackerData', 'integration_id'],
[zentao_integration, 'Integrations::ZentaoTrackerData', 'integration_id']
]
end
diff --git a/spec/models/deploy_key_spec.rb b/spec/models/deploy_key_spec.rb
index c22bad0e062..3272d5236d3 100644
--- a/spec/models/deploy_key_spec.rb
+++ b/spec/models/deploy_key_spec.rb
@@ -5,17 +5,20 @@ require 'spec_helper'
RSpec.describe DeployKey, :mailer do
describe "Associations" do
it { is_expected.to have_many(:deploy_keys_projects) }
+
it do
is_expected.to have_many(:deploy_keys_projects_with_write_access)
.conditions(can_push: true)
.class_name('DeployKeysProject')
end
+
it do
is_expected.to have_many(:projects_with_write_access)
.class_name('Project')
.through(:deploy_keys_projects_with_write_access)
.source(:project)
end
+
it { is_expected.to have_many(:projects) }
it { is_expected.to have_many(:protected_branch_push_access_levels) }
end
@@ -146,4 +149,10 @@ RSpec.describe DeployKey, :mailer do
end
end
end
+
+ describe '#audit_details' do
+ it "equals to the key's title" do
+ expect(subject.audit_details).to eq(subject.title)
+ end
+ end
end
diff --git a/spec/models/design_management/version_spec.rb b/spec/models/design_management/version_spec.rb
index 303bac61e1e..519ba3c67b4 100644
--- a/spec/models/design_management/version_spec.rb
+++ b/spec/models/design_management/version_spec.rb
@@ -142,14 +142,18 @@ RSpec.describe DesignManagement::Version do
it 'does not leave invalid versions around if creation fails' do
expect do
- described_class.create_for_designs([], 'abcdef', author) rescue nil
+ described_class.create_for_designs([], 'abcdef', author)
+ rescue StandardError
+ nil
end.not_to change { described_class.count }
end
it 'does not leave orphaned design-versions around if creation fails' do
actions = as_actions(designs)
expect do
- described_class.create_for_designs(actions, '', author) rescue nil
+ described_class.create_for_designs(actions, '', author)
+ rescue StandardError
+ nil
end.not_to change { DesignManagement::Action.count }
end
diff --git a/spec/models/environment_spec.rb b/spec/models/environment_spec.rb
index e3207636bdc..3f4372dafd0 100644
--- a/spec/models/environment_spec.rb
+++ b/spec/models/environment_spec.rb
@@ -42,6 +42,48 @@ RSpec.describe Environment, :use_clean_rails_memory_store_caching do
end
end
+ describe 'validate and sanitize external url' do
+ let_it_be_with_refind(:environment) { create(:environment) }
+
+ where(:source_external_url, :expected_error_message) do
+ nil | nil
+ 'http://example.com' | nil
+ 'example.com' | nil
+ 'www.example.io' | nil
+ 'http://$URL' | nil
+ 'http://$(URL)' | nil
+ 'custom://example.com' | nil
+ '1.1.1.1' | nil
+ '$BASE_URL/${CI_COMMIT_REF_NAME}' | nil
+ '$ENVIRONMENT_URL' | nil
+ 'https://$SUB.$MAIN' | nil
+ 'https://$SUB-$REGION.$MAIN' | nil
+ 'https://example.com?param={()}' | nil
+ 'http://XSS?x=<script>alert(1)</script>' | nil
+ 'https://user:${VARIABLE}@example.io' | nil
+ 'https://example.com/test?param={data}' | nil
+ 'http://${URL}' | 'URI is invalid'
+ 'https://${URL}.example/test' | 'URI is invalid'
+ 'http://test${CI_MERGE_REQUEST_IID}.example.com' | 'URI is invalid'
+ 'javascript:alert("hello")' | 'javascript scheme is not allowed'
+ end
+ with_them do
+ it 'sets an external URL or an error' do
+ environment.external_url = source_external_url
+
+ environment.valid?
+
+ if expected_error_message
+ expect(environment.errors[:external_url].first).to eq(expected_error_message)
+ else
+ expect(environment.errors[:external_url]).to be_empty,
+ "There were unexpected errors: #{environment.errors.full_messages}"
+ expect(environment.external_url).to eq(source_external_url)
+ end
+ end
+ end
+ end
+
describe '.before_save' do
it 'ensures environment tier when a new object is created' do
environment = build(:environment, name: 'gprd', tier: nil)
@@ -194,7 +236,7 @@ RSpec.describe Environment, :use_clean_rails_memory_store_caching do
end
context 'when query is nil' do
- let(:query) { }
+ let(:query) {}
it 'raises an error' do
expect { subject }.to raise_error(NoMethodError)
@@ -770,16 +812,6 @@ RSpec.describe Environment, :use_clean_rails_memory_store_caching do
it 'returns the successful deployment jobs for the last deployment pipeline' do
expect(subject.pluck(:id)).to contain_exactly(deployment_a.id, deployment_b.id)
end
-
- context 'when the feature flag is disabled' do
- before do
- stub_feature_flags(batch_load_environment_last_deployment_group: false)
- end
-
- it 'returns the successful deployment jobs for the last deployment pipeline' do
- expect(subject.pluck(:id)).to contain_exactly(deployment_a.id, deployment_b.id)
- end
- end
end
end
@@ -817,8 +849,8 @@ RSpec.describe Environment, :use_clean_rails_memory_store_caching do
describe '#actions_for' do
let(:deployment) { create(:deployment, :success, environment: environment) }
let(:pipeline) { deployment.deployable.pipeline }
- let!(:review_action) { create(:ci_build, :manual, name: 'review-apps', pipeline: pipeline, environment: 'review/$CI_COMMIT_REF_NAME' )}
- let!(:production_action) { create(:ci_build, :manual, name: 'production', pipeline: pipeline, environment: 'production' )}
+ let!(:review_action) { create(:ci_build, :manual, name: 'review-apps', pipeline: pipeline, environment: 'review/$CI_COMMIT_REF_NAME' ) }
+ let!(:production_action) { create(:ci_build, :manual, name: 'production', pipeline: pipeline, environment: 'production' ) }
it 'returns a list of actions with matching environment' do
expect(environment.actions_for('review/master')).to contain_exactly(review_action)
@@ -993,178 +1025,29 @@ RSpec.describe Environment, :use_clean_rails_memory_store_caching do
describe '#last_visible_deployable' do
subject { environment.last_visible_deployable }
- context 'does not join across databases' do
- let(:pipeline_a) { create(:ci_pipeline, project: project) }
- let(:pipeline_b) { create(:ci_pipeline, project: project) }
- let(:ci_build_a) { create(:ci_build, project: project, pipeline: pipeline_a) }
- let(:ci_build_b) { create(:ci_build, project: project, pipeline: pipeline_b) }
-
- before do
- create(:deployment, :success, project: project, environment: environment, deployable: ci_build_a)
- create(:deployment, :failed, project: project, environment: environment, deployable: ci_build_b)
- end
-
- it 'for direct call' do
- with_cross_joins_prevented do
- expect(subject.id).to eq(ci_build_b.id)
- end
- end
-
- it 'for preload' do
- environment.reload
-
- with_cross_joins_prevented do
- ActiveRecord::Associations::Preloader.new.preload(environment, [last_visible_deployable: []])
- expect(subject.id).to eq(ci_build_b.id)
- end
- end
+ let!(:deployment) do
+ create(:deployment, :success, project: project, environment: environment, deployable: deployable)
end
- context 'call after preload' do
- it 'fetches from association cache' do
- pipeline = create(:ci_pipeline, project: project)
- ci_build = create(:ci_build, project: project, pipeline: pipeline)
- create(:deployment, :failed, project: project, environment: environment, deployable: ci_build)
-
- environment.reload
- ActiveRecord::Associations::Preloader.new.preload(environment, [last_visible_deployable: []])
-
- query_count = ActiveRecord::QueryRecorder.new do
- expect(subject.id).to eq(ci_build.id)
- end.count
+ let!(:deployable) { create(:ci_build, :success, project: project) }
- expect(query_count).to eq(0)
- end
+ it 'fetches the deployable through the last visible deployment' do
+ is_expected.to eq(deployable)
end
end
describe '#last_visible_pipeline' do
- let(:user) { create(:user) }
- let_it_be(:project) { create(:project, :repository) }
-
- let(:environment) { create(:environment, project: project) }
- let(:commit) { project.commit }
-
- let(:success_pipeline) do
- create(:ci_pipeline, :success, project: project, user: user, sha: commit.sha)
- end
-
- let(:failed_pipeline) do
- create(:ci_pipeline, :failed, project: project, user: user, sha: commit.sha)
- end
-
- it 'uses the last deployment even if it failed' do
- pipeline = create(:ci_pipeline, project: project, user: user, sha: commit.sha)
- ci_build = create(:ci_build, project: project, pipeline: pipeline)
- create(:deployment, :failed, project: project, environment: environment, deployable: ci_build, sha: commit.sha)
-
- last_pipeline = environment.last_visible_pipeline
+ subject { environment.last_visible_pipeline }
- expect(last_pipeline).to eq(pipeline)
+ let!(:deployment) do
+ create(:deployment, :success, project: project, environment: environment, deployable: deployable)
end
- it 'returns nil if there is no deployment' do
- create(:ci_build, project: project, pipeline: success_pipeline)
+ let!(:deployable) { create(:ci_build, :success, project: project, pipeline: pipeline) }
+ let!(:pipeline) { create(:ci_pipeline, :success, project: project) }
- expect(environment.last_visible_pipeline).to be_nil
- end
-
- it 'does not return an invisible pipeline' do
- failed_pipeline = create(:ci_pipeline, project: project, user: user, sha: commit.sha)
- ci_build_a = create(:ci_build, project: project, pipeline: failed_pipeline)
- create(:deployment, :failed, project: project, environment: environment, deployable: ci_build_a, sha: commit.sha)
- pipeline = create(:ci_pipeline, project: project, user: user, sha: commit.sha)
- ci_build_b = create(:ci_build, project: project, pipeline: pipeline)
- create(:deployment, :created, project: project, environment: environment, deployable: ci_build_b, sha: commit.sha)
-
- last_pipeline = environment.last_visible_pipeline
-
- expect(last_pipeline).to eq(failed_pipeline)
- end
-
- context 'does not join across databases' do
- let(:pipeline_a) { create(:ci_pipeline, project: project) }
- let(:pipeline_b) { create(:ci_pipeline, project: project) }
- let(:ci_build_a) { create(:ci_build, project: project, pipeline: pipeline_a) }
- let(:ci_build_b) { create(:ci_build, project: project, pipeline: pipeline_b) }
-
- before do
- create(:deployment, :success, project: project, environment: environment, deployable: ci_build_a)
- create(:deployment, :failed, project: project, environment: environment, deployable: ci_build_b)
- end
-
- subject { environment.last_visible_pipeline }
-
- it 'for direct call' do
- with_cross_joins_prevented do
- expect(subject.id).to eq(pipeline_b.id)
- end
- end
-
- it 'for preload' do
- environment.reload
-
- with_cross_joins_prevented do
- ActiveRecord::Associations::Preloader.new.preload(environment, [last_visible_pipeline: []])
- expect(subject.id).to eq(pipeline_b.id)
- end
- end
- end
-
- context 'for the environment' do
- it 'returns the last pipeline' do
- pipeline = create(:ci_pipeline, project: project, user: user, sha: commit.sha)
- ci_build = create(:ci_build, project: project, pipeline: pipeline)
- create(:deployment, :success, project: project, environment: environment, deployable: ci_build, sha: commit.sha)
-
- last_pipeline = environment.last_visible_pipeline
-
- expect(last_pipeline).to eq(pipeline)
- end
-
- context 'with multiple deployments' do
- it 'returns the last pipeline' do
- pipeline_a = create(:ci_pipeline, project: project, user: user)
- pipeline_b = create(:ci_pipeline, project: project, user: user)
- ci_build_a = create(:ci_build, project: project, pipeline: pipeline_a)
- ci_build_b = create(:ci_build, project: project, pipeline: pipeline_b)
- create(:deployment, :success, project: project, environment: environment, deployable: ci_build_a)
- create(:deployment, :success, project: project, environment: environment, deployable: ci_build_b)
-
- last_pipeline = environment.last_visible_pipeline
-
- expect(last_pipeline).to eq(pipeline_b)
- end
- end
-
- context 'with multiple pipelines' do
- it 'returns the last pipeline' do
- create(:ci_build, project: project, pipeline: success_pipeline)
- ci_build_b = create(:ci_build, project: project, pipeline: failed_pipeline)
- create(:deployment, :failed, project: project, environment: environment, deployable: ci_build_b, sha: commit.sha)
-
- last_pipeline = environment.last_visible_pipeline
-
- expect(last_pipeline).to eq(failed_pipeline)
- end
- end
- end
-
- context 'call after preload' do
- it 'fetches from association cache' do
- pipeline = create(:ci_pipeline, project: project)
- ci_build = create(:ci_build, project: project, pipeline: pipeline)
- create(:deployment, :failed, project: project, environment: environment, deployable: ci_build)
-
- environment.reload
- ActiveRecord::Associations::Preloader.new.preload(environment, [last_visible_pipeline: []])
-
- query_count = ActiveRecord::QueryRecorder.new do
- expect(environment.last_visible_pipeline.id).to eq(pipeline.id)
- end.count
-
- expect(query_count).to eq(0)
- end
+ it 'fetches the pipeline through the last visible deployment' do
+ is_expected.to eq(pipeline)
end
end
diff --git a/spec/models/error_tracking/project_error_tracking_setting_spec.rb b/spec/models/error_tracking/project_error_tracking_setting_spec.rb
index ebfd9f04f6a..0685144dea6 100644
--- a/spec/models/error_tracking/project_error_tracking_setting_spec.rb
+++ b/spec/models/error_tracking/project_error_tracking_setting_spec.rb
@@ -121,36 +121,36 @@ RSpec.describe ErrorTracking::ProjectErrorTrackingSetting do
end
end
end
- end
- describe 'before_validation :reset_token' do
- context 'when a token was previously set' do
- subject { create(:project_error_tracking_setting, project: project) }
+ describe 'before_validation :reset_token' do
+ context 'when a token was previously set' do
+ subject { create(:project_error_tracking_setting, project: project) }
- it 'resets token if url changed' do
- subject.api_url = 'http://sentry.com/api/0/projects/org-slug/proj-slug/'
+ it 'resets token if url changed' do
+ subject.api_url = 'http://sentry.com/api/0/projects/org-slug/proj-slug/'
- expect(subject).not_to be_valid
- expect(subject.token).to be_nil
- end
+ expect(subject).not_to be_valid
+ expect(subject.token).to be_nil
+ end
- it "does not reset token if new url is set together with the same token" do
- subject.api_url = 'http://sentrytest.com/api/0/projects/org-slug/proj-slug/'
- current_token = subject.token
- subject.token = current_token
+ it "does not reset token if new url is set together with the same token" do
+ subject.api_url = 'http://sentrytest.com/api/0/projects/org-slug/proj-slug/'
+ current_token = subject.token
+ subject.token = current_token
- expect(subject).to be_valid
- expect(subject.token).to eq(current_token)
- expect(subject.api_url).to eq('http://sentrytest.com/api/0/projects/org-slug/proj-slug/')
- end
+ expect(subject).to be_valid
+ expect(subject.token).to eq(current_token)
+ expect(subject.api_url).to eq('http://sentrytest.com/api/0/projects/org-slug/proj-slug/')
+ end
- it 'does not reset token if new url is set together with a new token' do
- subject.api_url = 'http://sentrytest.com/api/0/projects/org-slug/proj-slug/'
- subject.token = 'token'
+ it 'does not reset token if new url is set together with a new token' do
+ subject.api_url = 'http://sentrytest.com/api/0/projects/org-slug/proj-slug/'
+ subject.token = 'token'
- expect(subject).to be_valid
- expect(subject.token).to eq('token')
- expect(subject.api_url).to eq('http://sentrytest.com/api/0/projects/org-slug/proj-slug/')
+ expect(subject).to be_valid
+ expect(subject.token).to eq('token')
+ expect(subject.api_url).to eq('http://sentrytest.com/api/0/projects/org-slug/proj-slug/')
+ end
end
end
end
diff --git a/spec/models/event_spec.rb b/spec/models/event_spec.rb
index 2c1bbfcb35f..9700852e567 100644
--- a/spec/models/event_spec.rb
+++ b/spec/models/event_spec.rb
@@ -264,6 +264,8 @@ RSpec.describe Event do
let(:project) { public_project }
let(:issue) { create(:issue, project: project, author: author, assignees: [assignee]) }
let(:confidential_issue) { create(:issue, :confidential, project: project, author: author, assignees: [assignee]) }
+ let(:work_item) { create(:work_item, project: project, author: author) }
+ let(:confidential_work_item) { create(:work_item, :confidential, project: project, author: author) }
let(:project_snippet) { create(:project_snippet, :public, project: project, author: author) }
let(:personal_snippet) { create(:personal_snippet, :public, author: author) }
let(:design) { create(:design, issue: issue, project: project) }
@@ -380,6 +382,28 @@ RSpec.describe Event do
end
end
+ context 'work item event' do
+ context 'for non confidential work item' do
+ let(:target) { work_item }
+
+ include_examples 'visibility examples' do
+ let(:visibility) { visible_to_all }
+ end
+
+ include_examples 'visible to assignee and author', true
+ end
+
+ context 'for confidential work item' do
+ let(:target) { confidential_work_item }
+
+ include_examples 'visibility examples' do
+ let(:visibility) { visible_to_none_except(:member, :admin) }
+ end
+
+ include_examples 'visible to author', true
+ end
+ end
+
context 'issue note event' do
context 'on non confidential issues' do
let(:target) { note_on_issue }
@@ -947,7 +971,7 @@ RSpec.describe Event do
let_it_be(:user) { create(:user) }
let_it_be(:note_on_project_snippet) { create(:note_on_project_snippet, author: user) }
let_it_be(:note_on_personal_snippet) { create(:note_on_personal_snippet, author: user) }
- let_it_be(:other_note) { create(:note_on_issue, author: user)}
+ let_it_be(:other_note) { create(:note_on_issue, author: user) }
let_it_be(:personal_snippet_event) { create(:event, :commented, project: nil, target: note_on_personal_snippet, author: user) }
let_it_be(:project_snippet_event) { create(:event, :commented, project: note_on_project_snippet.project, target: note_on_project_snippet, author: user) }
let_it_be(:other_event) { create(:event, :commented, project: other_note.project, target: other_note, author: user) }
diff --git a/spec/models/group_group_link_spec.rb b/spec/models/group_group_link_spec.rb
index 72c700e7981..969987c7e64 100644
--- a/spec/models/group_group_link_spec.rb
+++ b/spec/models/group_group_link_spec.rb
@@ -24,12 +24,60 @@ RSpec.describe GroupGroupLink do
it 'returns all records which are greater than Guests access' do
expect(described_class.non_guests).to match_array([
- group_group_link_reporter, group_group_link,
- group_group_link_maintainer, group_group_link_owner
+ group_group_link_reporter, group_group_link,
+ group_group_link_maintainer, group_group_link_owner
])
end
end
+ describe '.with_owner_or_maintainer_access' do
+ let_it_be(:group_group_link_maintainer) { create :group_group_link, :maintainer }
+ let_it_be(:group_group_link_owner) { create :group_group_link, :owner }
+ let_it_be(:group_group_link_reporter) { create :group_group_link, :reporter }
+ let_it_be(:group_group_link_guest) { create :group_group_link, :guest }
+
+ it 'returns all records which have OWNER or MAINTAINER access' do
+ expect(described_class.with_owner_or_maintainer_access).to match_array([
+ group_group_link_maintainer,
+ group_group_link_owner
+ ])
+ end
+ end
+
+ context 'for access via group shares' do
+ let_it_be(:shared_with_group_1) { create(:group) }
+ let_it_be(:shared_with_group_2) { create(:group) }
+ let_it_be(:shared_with_group_3) { create(:group) }
+ let_it_be(:shared_group_1) { create(:group) }
+ let_it_be(:shared_group_2) { create(:group) }
+ let_it_be(:shared_group_3) { create(:group) }
+ let_it_be(:shared_group_1_subgroup) { create(:group, parent: shared_group_1) }
+
+ before do
+ create :group_group_link, shared_with_group: shared_with_group_1, shared_group: shared_group_1
+ create :group_group_link, shared_with_group: shared_with_group_2, shared_group: shared_group_2
+ create :group_group_link, shared_with_group: shared_with_group_3, shared_group: shared_group_3
+ end
+
+ describe '.groups_accessible_via' do
+ it 'returns the other groups accessible via the group shares of the specified groups' do
+ group_ids = [shared_with_group_1.id, shared_with_group_2.id]
+ expected_result = Group.id_in([shared_group_1.id, shared_group_1_subgroup.id, shared_group_2.id])
+
+ expect(described_class.groups_accessible_via(group_ids)).to match_array(expected_result)
+ end
+ end
+
+ describe '.groups_having_access_to' do
+ it 'returns all other groups that have access to the specified groups via group shares' do
+ group_ids = [shared_group_1.id, shared_group_2.id]
+ expected_result = Group.id_in([shared_with_group_1.id, shared_with_group_2.id])
+
+ expect(described_class.groups_having_access_to(group_ids)).to match_array(expected_result)
+ end
+ end
+ end
+
describe '.distinct_on_shared_with_group_id_with_group_access' do
let_it_be(:sub_shared_group) { create(:group, parent: shared_group) }
let_it_be(:other_group) { create(:group) }
diff --git a/spec/models/group_spec.rb b/spec/models/group_spec.rb
index e8e805b2678..61662411ac8 100644
--- a/spec/models/group_spec.rb
+++ b/spec/models/group_spec.rb
@@ -359,7 +359,7 @@ RSpec.describe Group do
context 'parent is updated' do
let(:new_parent) { create(:group) }
- subject {group.update!(parent: new_parent, name: 'new name') }
+ subject { group.update!(parent: new_parent, name: 'new name') }
it_behaves_like 'update on column', :traversal_ids
end
@@ -806,6 +806,20 @@ RSpec.describe Group do
end
end
+ describe '.project_creation_allowed' do
+ let_it_be(:group_1) { create(:group, project_creation_level: Gitlab::Access::NO_ONE_PROJECT_ACCESS) }
+ let_it_be(:group_2) { create(:group, project_creation_level: Gitlab::Access::DEVELOPER_MAINTAINER_PROJECT_ACCESS) }
+ let_it_be(:group_3) { create(:group, project_creation_level: Gitlab::Access::MAINTAINER_PROJECT_ACCESS) }
+ let_it_be(:group_4) { create(:group, project_creation_level: nil) }
+
+ it 'only includes groups where project creation is allowed' do
+ result = described_class.project_creation_allowed
+
+ expect(result).to include(group_2, group_3, group_4)
+ expect(result).not_to include(group_1)
+ end
+ end
+
describe 'by_ids_or_paths' do
let(:group_path) { 'group_path' }
let!(:group) { create(:group, path: group_path) }
@@ -2603,7 +2617,11 @@ RSpec.describe Group do
it 'does not enable shared runners' do
expect do
- subject rescue nil
+ begin
+ subject
+ rescue StandardError
+ nil
+ end
parent.reload
group.reload
@@ -2704,7 +2722,11 @@ RSpec.describe Group do
it 'does not allow descendants to override' do
expect do
- subject rescue nil
+ begin
+ subject
+ rescue StandardError
+ nil
+ end
parent.reload
group.reload
@@ -2848,7 +2870,7 @@ RSpec.describe Group do
end
context 'for subgroup project' do
- let_it_be(:subgroup) { create(:group, :private, parent: group)}
+ let_it_be(:subgroup) { create(:group, :private, parent: group) }
let_it_be(:project) { create(:project, group: subgroup, service_desk_enabled: true) }
it { is_expected.to eq(true) }
@@ -3383,6 +3405,20 @@ RSpec.describe Group do
end
end
+ describe '#work_items_mvc_2_feature_flag_enabled?' do
+ it_behaves_like 'checks self and root ancestor feature flag' do
+ let(:feature_flag) { :work_items_mvc_2 }
+ let(:feature_flag_method) { :work_items_mvc_2_feature_flag_enabled? }
+ end
+ end
+
+ describe '#work_items_create_from_markdown_feature_flag_enabled?' do
+ it_behaves_like 'checks self and root ancestor feature flag' do
+ let(:feature_flag) { :work_items_create_from_markdown }
+ let(:feature_flag_method) { :work_items_create_from_markdown_feature_flag_enabled? }
+ end
+ end
+
describe 'group shares' do
let!(:sub_group) { create(:group, parent: group) }
let!(:sub_sub_group) { create(:group, parent: sub_group) }
diff --git a/spec/models/hooks/system_hook_spec.rb b/spec/models/hooks/system_hook_spec.rb
index 9f5f81dd6c0..f4786083b75 100644
--- a/spec/models/hooks/system_hook_spec.rb
+++ b/spec/models/hooks/system_hook_spec.rb
@@ -37,7 +37,7 @@ RSpec.describe SystemHook do
let(:project) { create(:project, namespace: user.namespace) }
let(:group) { create(:group) }
let(:params) do
- { name: 'John Doe', username: 'jduser', email: 'jg@example.com', password: 'mydummypass' }
+ { name: 'John Doe', username: 'jduser', email: 'jg@example.com', password: User.random_password }
end
before do
diff --git a/spec/models/hooks/web_hook_spec.rb b/spec/models/hooks/web_hook_spec.rb
index 9faa5e1567c..036d2effc0f 100644
--- a/spec/models/hooks/web_hook_spec.rb
+++ b/spec/models/hooks/web_hook_spec.rb
@@ -482,12 +482,6 @@ RSpec.describe WebHook do
expect(hook).not_to be_temporarily_disabled
end
-
- it 'can ignore the feature flag' do
- stub_feature_flags(web_hooks_disable_failed: false)
-
- expect(hook).to be_temporarily_disabled(ignore_flag: true)
- end
end
end
@@ -510,12 +504,6 @@ RSpec.describe WebHook do
expect(hook).not_to be_permanently_disabled
end
-
- it 'can ignore the feature flag' do
- stub_feature_flags(web_hooks_disable_failed: false)
-
- expect(hook).to be_permanently_disabled(ignore_flag: true)
- end
end
end
diff --git a/spec/models/incident_management/issuable_escalation_status_spec.rb b/spec/models/incident_management/issuable_escalation_status_spec.rb
index 39d1fb325f5..f87e6e8327a 100644
--- a/spec/models/incident_management/issuable_escalation_status_spec.rb
+++ b/spec/models/incident_management/issuable_escalation_status_spec.rb
@@ -11,6 +11,7 @@ RSpec.describe IncidentManagement::IssuableEscalationStatus do
describe 'associations' do
it { is_expected.to belong_to(:issue) }
+
it do
is_expected.to have_one(:project).through(:issue).inverse_of(:incident_management_issuable_escalation_statuses)
end
diff --git a/spec/models/incident_management/timeline_event_spec.rb b/spec/models/incident_management/timeline_event_spec.rb
index 17150fc9266..9f4011fe6a7 100644
--- a/spec/models/incident_management/timeline_event_spec.rb
+++ b/spec/models/incident_management/timeline_event_spec.rb
@@ -47,11 +47,20 @@ RSpec.describe IncidentManagement::TimelineEvent do
describe '#cache_markdown_field' do
let(:note) { 'note **bold** _italic_ `code` ![image](/path/img.png) :+1:👍' }
+
+ let(:expected_image_html) do
+ '<a class="with-attachment-icon" href="/path/img.png" target="_blank" rel="noopener noreferrer">image</a>'
+ end
+
+ # rubocop:disable Layout/LineLength
+ let(:expected_emoji_html) do
+ '<gl-emoji title="thumbs up sign" data-name="thumbsup" data-unicode-version="6.0">👍</gl-emoji><gl-emoji title="thumbs up sign" data-name="thumbsup" data-unicode-version="6.0">👍</gl-emoji>'
+ end
+
let(:expected_note_html) do
- # rubocop:disable Layout/LineLength
- '<p>note <strong>bold</strong> <em>italic</em> <code>code</code> <a class="with-attachment-icon" href="/path/img.png" target="_blank" rel="noopener noreferrer">image</a> 👍👍</p>'
- # rubocop:enable Layout/LineLength
+ %Q(<p>note <strong>bold</strong> <em>italic</em> <code>code</code> #{expected_image_html} #{expected_emoji_html}</p>)
end
+ # rubocop:enable Layout/LineLength
before do
allow(Banzai::Renderer).to receive(:cacheless_render_field).and_call_original
diff --git a/spec/models/integration_spec.rb b/spec/models/integration_spec.rb
index 86074765c7b..950f2c639fb 100644
--- a/spec/models/integration_spec.rb
+++ b/spec/models/integration_spec.rb
@@ -11,9 +11,8 @@ RSpec.describe Integration do
describe "Associations" do
it { is_expected.to belong_to(:project).inverse_of(:integrations) }
it { is_expected.to belong_to(:group).inverse_of(:integrations) }
- it { is_expected.to have_one(:service_hook).inverse_of(:integration).with_foreign_key(:service_id) }
- it { is_expected.to have_one(:issue_tracker_data).autosave(true).inverse_of(:integration).with_foreign_key(:service_id).class_name('Integrations::IssueTrackerData') }
- it { is_expected.to have_one(:jira_tracker_data).autosave(true).inverse_of(:integration).with_foreign_key(:service_id).class_name('Integrations::JiraTrackerData') }
+ it { is_expected.to have_one(:issue_tracker_data).autosave(true).inverse_of(:integration).with_foreign_key(:integration_id).class_name('Integrations::IssueTrackerData') }
+ it { is_expected.to have_one(:jira_tracker_data).autosave(true).inverse_of(:integration).with_foreign_key(:integration_id).class_name('Integrations::JiraTrackerData') }
end
describe 'validations' do
diff --git a/spec/models/integrations/bamboo_spec.rb b/spec/models/integrations/bamboo_spec.rb
index 574b87d6c60..e92226d109f 100644
--- a/spec/models/integrations/bamboo_spec.rb
+++ b/spec/models/integrations/bamboo_spec.rb
@@ -33,6 +33,7 @@ RSpec.describe Integrations::Bamboo, :use_clean_rails_memory_store_caching do
it { is_expected.to validate_presence_of(:build_key) }
it { is_expected.to validate_presence_of(:bamboo_url) }
+
it_behaves_like 'issue tracker integration URL attribute', :bamboo_url
describe '#username' do
diff --git a/spec/models/integrations/bugzilla_spec.rb b/spec/models/integrations/bugzilla_spec.rb
index 432306c8fa8..f05bc26d066 100644
--- a/spec/models/integrations/bugzilla_spec.rb
+++ b/spec/models/integrations/bugzilla_spec.rb
@@ -12,6 +12,7 @@ RSpec.describe Integrations::Bugzilla do
it { is_expected.to validate_presence_of(:project_url) }
it { is_expected.to validate_presence_of(:issues_url) }
it { is_expected.to validate_presence_of(:new_issue_url) }
+
it_behaves_like 'issue tracker integration URL attribute', :project_url
it_behaves_like 'issue tracker integration URL attribute', :issues_url
it_behaves_like 'issue tracker integration URL attribute', :new_issue_url
diff --git a/spec/models/integrations/buildkite_spec.rb b/spec/models/integrations/buildkite_spec.rb
index af2e587dc7b..38ceb5db49c 100644
--- a/spec/models/integrations/buildkite_spec.rb
+++ b/spec/models/integrations/buildkite_spec.rb
@@ -30,6 +30,7 @@ RSpec.describe Integrations::Buildkite, :use_clean_rails_memory_store_caching do
it { is_expected.to validate_presence_of(:project_url) }
it { is_expected.to validate_presence_of(:token) }
+
it_behaves_like 'issue tracker integration URL attribute', :project_url
end
diff --git a/spec/models/integrations/campfire_spec.rb b/spec/models/integrations/campfire_spec.rb
index 48e24299bbd..a6bcd22b6f6 100644
--- a/spec/models/integrations/campfire_spec.rb
+++ b/spec/models/integrations/campfire_spec.rb
@@ -11,7 +11,7 @@ RSpec.describe Integrations::Campfire do
describe 'Validations' do
it { is_expected.to validate_numericality_of(:room).is_greater_than(0).only_integer }
- it { is_expected.to validate_length_of(:subdomain).is_at_most(63) }
+ it { is_expected.to validate_length_of(:subdomain).is_at_least(1).is_at_most(63).allow_blank }
it { is_expected.to allow_value("foo").for(:subdomain) }
it { is_expected.not_to allow_value("foo.bar").for(:subdomain) }
it { is_expected.not_to allow_value("foo.bar/#").for(:subdomain) }
diff --git a/spec/models/integrations/chat_message/issue_message_spec.rb b/spec/models/integrations/chat_message/issue_message_spec.rb
index 7026a314b78..4a86322cdaf 100644
--- a/spec/models/integrations/chat_message/issue_message_spec.rb
+++ b/spec/models/integrations/chat_message/issue_message_spec.rb
@@ -65,7 +65,7 @@ RSpec.describe Integrations::ChatMessage::IssueMessage do
end
it 'returns a message regarding closing of issues' do
- expect(subject.pretext). to eq(
+ expect(subject.pretext).to eq(
'[<http://somewhere.com|project_name>] Issue <http://url.com|#100 Issue title> closed by Test User (test.user)')
expect(subject.attachments).to be_empty
end
@@ -111,7 +111,7 @@ RSpec.describe Integrations::ChatMessage::IssueMessage do
end
it 'returns a message regarding closing of issues' do
- expect(subject.pretext). to eq(
+ expect(subject.pretext).to eq(
'[[project_name](http://somewhere.com)] Issue [#100 Issue title](http://url.com) closed by Test User (test.user)')
expect(subject.attachments).to be_empty
expect(subject.activity).to eq({
diff --git a/spec/models/integrations/chat_message/wiki_page_message_spec.rb b/spec/models/integrations/chat_message/wiki_page_message_spec.rb
index 4aa96c7e031..16659311c52 100644
--- a/spec/models/integrations/chat_message/wiki_page_message_spec.rb
+++ b/spec/models/integrations/chat_message/wiki_page_message_spec.rb
@@ -9,7 +9,7 @@ RSpec.describe Integrations::ChatMessage::WikiPageMessage do
let(:username) { 'test.user' }
let(:avatar_url) { 'http://someavatar.com' }
let(:project_name) { 'project_name' }
- let(:project_url) {'http://somewhere.com' }
+ let(:project_url) { 'http://somewhere.com' }
let(:url) { 'http://url.com' }
let(:diff_url) { 'http://url.com/diff?version_id=1234' }
let(:wiki_page_title) { 'Wiki page title' }
diff --git a/spec/models/integrations/custom_issue_tracker_spec.rb b/spec/models/integrations/custom_issue_tracker_spec.rb
index e1ffe7a74f0..11f98b99bbe 100644
--- a/spec/models/integrations/custom_issue_tracker_spec.rb
+++ b/spec/models/integrations/custom_issue_tracker_spec.rb
@@ -12,6 +12,7 @@ RSpec.describe Integrations::CustomIssueTracker do
it { is_expected.to validate_presence_of(:project_url) }
it { is_expected.to validate_presence_of(:issues_url) }
it { is_expected.to validate_presence_of(:new_issue_url) }
+
it_behaves_like 'issue tracker integration URL attribute', :project_url
it_behaves_like 'issue tracker integration URL attribute', :issues_url
it_behaves_like 'issue tracker integration URL attribute', :new_issue_url
diff --git a/spec/models/integrations/datadog_spec.rb b/spec/models/integrations/datadog_spec.rb
index 47f916e8457..cfc44b22a84 100644
--- a/spec/models/integrations/datadog_spec.rb
+++ b/spec/models/integrations/datadog_spec.rb
@@ -240,20 +240,4 @@ RSpec.describe Integrations::Datadog do
end
end
end
-
- describe '#fields' do
- it 'includes the archive_trace_events field' do
- expect(instance.fields).to include(have_attributes(name: 'archive_trace_events'))
- end
-
- context 'when the FF :datadog_integration_logs_collection is disabled' do
- before do
- stub_feature_flags(datadog_integration_logs_collection: false)
- end
-
- it 'does not include the archive_trace_events field' do
- expect(instance.fields).not_to include(have_attributes(name: 'archive_trace_events'))
- end
- end
- end
end
diff --git a/spec/models/integrations/drone_ci_spec.rb b/spec/models/integrations/drone_ci_spec.rb
index 5ae4af1a665..8a51f8a0705 100644
--- a/spec/models/integrations/drone_ci_spec.rb
+++ b/spec/models/integrations/drone_ci_spec.rb
@@ -19,6 +19,7 @@ RSpec.describe Integrations::DroneCi, :use_clean_rails_memory_store_caching do
it { is_expected.to validate_presence_of(:token) }
it { is_expected.to validate_presence_of(:drone_url) }
+
it_behaves_like 'issue tracker integration URL attribute', :drone_url
end
diff --git a/spec/models/integrations/ewm_spec.rb b/spec/models/integrations/ewm_spec.rb
index 49681fefe55..dc48a2c982f 100644
--- a/spec/models/integrations/ewm_spec.rb
+++ b/spec/models/integrations/ewm_spec.rb
@@ -12,6 +12,7 @@ RSpec.describe Integrations::Ewm do
it { is_expected.to validate_presence_of(:project_url) }
it { is_expected.to validate_presence_of(:issues_url) }
it { is_expected.to validate_presence_of(:new_issue_url) }
+
it_behaves_like 'issue tracker integration URL attribute', :project_url
it_behaves_like 'issue tracker integration URL attribute', :issues_url
it_behaves_like 'issue tracker integration URL attribute', :new_issue_url
diff --git a/spec/models/integrations/external_wiki_spec.rb b/spec/models/integrations/external_wiki_spec.rb
index 1621605d39f..8644e20690c 100644
--- a/spec/models/integrations/external_wiki_spec.rb
+++ b/spec/models/integrations/external_wiki_spec.rb
@@ -10,6 +10,7 @@ RSpec.describe Integrations::ExternalWiki do
end
it { is_expected.to validate_presence_of(:external_wiki_url) }
+
it_behaves_like 'issue tracker integration URL attribute', :external_wiki_url
end
diff --git a/spec/models/integrations/harbor_spec.rb b/spec/models/integrations/harbor_spec.rb
index 5d8597969a1..3952495119a 100644
--- a/spec/models/integrations/harbor_spec.rb
+++ b/spec/models/integrations/harbor_spec.rb
@@ -24,7 +24,7 @@ RSpec.describe Integrations::Harbor do
it { is_expected.not_to allow_value('https://192.168.1.1').for(:url) }
it { is_expected.not_to allow_value('https://127.0.0.1').for(:url) }
- it { is_expected.to allow_value('https://demo.goharbor.io').for(:url)}
+ it { is_expected.to allow_value('https://demo.goharbor.io').for(:url) }
end
describe '#fields' do
@@ -63,6 +63,8 @@ RSpec.describe Integrations::Harbor do
it 'returns vars when harbor_integration is activated' do
ci_vars = [
{ key: 'HARBOR_URL', value: url },
+ { key: 'HARBOR_HOST', value: 'demo.goharbor.io' },
+ { key: 'HARBOR_OCI', value: 'oci://demo.goharbor.io' },
{ key: 'HARBOR_PROJECT', value: project_name },
{ key: 'HARBOR_USERNAME', value: username },
{ key: 'HARBOR_PASSWORD', value: password, public: false, masked: true }
diff --git a/spec/models/integrations/jira_spec.rb b/spec/models/integrations/jira_spec.rb
index 01c08a0948f..a52a4514ebe 100644
--- a/spec/models/integrations/jira_spec.rb
+++ b/spec/models/integrations/jira_spec.rb
@@ -619,6 +619,18 @@ RSpec.describe Integrations::Jira do
close_issue
end
+ it_behaves_like 'Snowplow event tracking' do
+ subject { close_issue }
+
+ let(:feature_flag_name) { :route_hll_to_snowplow_phase2 }
+ let(:category) { 'Integrations::Jira' }
+ let(:action) { 'perform_integrations_action' }
+ let(:namespace) { project.namespace }
+ let(:user) { current_user }
+ let(:label) { 'redis_hll_counters.ecosystem.ecosystem_total_unique_counts_monthly' }
+ let(:property) { 'i_ecosystem_jira_service_close_issue' }
+ end
+
it 'does not fail if remote_link.all on issue returns nil' do
allow(JIRA::Resource::Remotelink).to receive(:all).and_return(nil)
@@ -962,6 +974,16 @@ RSpec.describe Integrations::Jira do
subject
end
+
+ it_behaves_like 'Snowplow event tracking' do
+ let(:feature_flag_name) { :route_hll_to_snowplow_phase2 }
+ let(:category) { 'Integrations::Jira' }
+ let(:action) { 'perform_integrations_action' }
+ let(:namespace) { project.namespace }
+ let(:user) { current_user }
+ let(:label) { 'redis_hll_counters.ecosystem.ecosystem_total_unique_counts_monthly' }
+ let(:property) { 'i_ecosystem_jira_service_cross_reference' }
+ end
end
context 'for commits' do
diff --git a/spec/models/integrations/microsoft_teams_spec.rb b/spec/models/integrations/microsoft_teams_spec.rb
index af6c142525c..b1b3e42b5e9 100644
--- a/spec/models/integrations/microsoft_teams_spec.rb
+++ b/spec/models/integrations/microsoft_teams_spec.rb
@@ -24,6 +24,7 @@ RSpec.describe Integrations::MicrosoftTeams do
end
it { is_expected.to validate_presence_of(:webhook) }
+
it_behaves_like 'issue tracker integration URL attribute', :webhook
end
diff --git a/spec/models/integrations/pumble_spec.rb b/spec/models/integrations/pumble_spec.rb
new file mode 100644
index 00000000000..8b9b5d214c6
--- /dev/null
+++ b/spec/models/integrations/pumble_spec.rb
@@ -0,0 +1,14 @@
+# frozen_string_literal: true
+
+require "spec_helper"
+
+RSpec.describe Integrations::Pumble do
+ it_behaves_like "chat integration", "Pumble" do
+ let(:client_arguments) { webhook_url }
+ let(:payload) do
+ {
+ text: be_present
+ }
+ end
+ end
+end
diff --git a/spec/models/integrations/slack_spec.rb b/spec/models/integrations/slack_spec.rb
index 5801a4c3749..ed282f1d39d 100644
--- a/spec/models/integrations/slack_spec.rb
+++ b/spec/models/integrations/slack_spec.rb
@@ -6,7 +6,8 @@ RSpec.describe Integrations::Slack do
it_behaves_like Integrations::SlackMattermostNotifier, "Slack"
describe '#execute' do
- let_it_be(:slack_integration) { create(:integrations_slack, branches_to_be_notified: 'all') }
+ let(:slack_integration) { create(:integrations_slack, branches_to_be_notified: 'all', project_id: project.id) }
+ let(:project) { create_default(:project, :repository, :wiki_repo) }
before do
stub_request(:post, slack_integration.webhook)
@@ -20,13 +21,23 @@ RSpec.describe Integrations::Slack do
context 'hook data includes a user object' do
let_it_be(:user) { create_default(:user) }
- let_it_be(:project) { create_default(:project, :repository, :wiki_repo) }
shared_examples 'increases the usage data counter' do |event_name|
+ subject(:execute) { slack_integration.execute(data) }
+
it 'increases the usage data counter' do
expect(Gitlab::UsageDataCounters::HLLRedisCounter).to receive(:track_event).with(event_name, values: user.id).and_call_original
- slack_integration.execute(data)
+ execute
+ end
+
+ it_behaves_like 'Snowplow event tracking' do
+ let(:feature_flag_name) { :route_hll_to_snowplow_phase2 }
+ let(:category) { 'Integrations::Slack' }
+ let(:action) { 'perform_integrations_action' }
+ let(:namespace) { project.namespace }
+ let(:label) { 'redis_hll_counters.ecosystem.ecosystem_total_unique_counts_monthly' }
+ let(:property) { event_name }
end
end
diff --git a/spec/models/integrations/teamcity_spec.rb b/spec/models/integrations/teamcity_spec.rb
index 046476225a6..da559264c1e 100644
--- a/spec/models/integrations/teamcity_spec.rb
+++ b/spec/models/integrations/teamcity_spec.rb
@@ -76,6 +76,7 @@ RSpec.describe Integrations::Teamcity, :use_clean_rails_memory_store_caching do
it { is_expected.to validate_presence_of(:build_type) }
it { is_expected.to validate_presence_of(:teamcity_url) }
+
it_behaves_like 'issue tracker integration URL attribute', :teamcity_url
describe '#username' do
diff --git a/spec/models/issue_spec.rb b/spec/models/issue_spec.rb
index 89c440dc49c..15fe6d7625a 100644
--- a/spec/models/issue_spec.rb
+++ b/spec/models/issue_spec.rb
@@ -69,7 +69,57 @@ RSpec.describe Issue do
end
describe 'validations' do
- subject { issue.valid? }
+ subject(:valid?) { issue.valid? }
+
+ describe 'due_date_after_start_date' do
+ let(:today) { Date.today }
+
+ context 'when both values are not present' do
+ let(:issue) { build(:issue) }
+
+ it { is_expected.to be_truthy }
+ end
+
+ context 'when start date is present and due date is not' do
+ let(:issue) { build(:work_item, start_date: today) }
+
+ it { is_expected.to be_truthy }
+ end
+
+ context 'when due date is present and start date is not' do
+ let(:issue) { build(:work_item, due_date: today) }
+
+ it { is_expected.to be_truthy }
+ end
+
+ context 'when both date values are present' do
+ context 'when due date is greater than start date' do
+ let(:issue) { build(:work_item, start_date: today, due_date: 1.week.from_now) }
+
+ it { is_expected.to be_truthy }
+ end
+
+ context 'when due date is equal to start date' do
+ let(:issue) { build(:work_item, start_date: today, due_date: today) }
+
+ it { is_expected.to be_truthy }
+ end
+
+ context 'when due date is before start date' do
+ let(:issue) { build(:work_item, due_date: today, start_date: 1.week.from_now) }
+
+ it { is_expected.to be_falsey }
+
+ it 'adds an error message' do
+ valid?
+
+ expect(issue.errors.full_messages).to contain_exactly(
+ 'Due date must be greater than or equal to start date'
+ )
+ end
+ end
+ end
+ end
describe 'issue_type' do
let(:issue) { build(:issue, issue_type: issue_type) }
@@ -86,6 +136,54 @@ RSpec.describe Issue do
it { is_expected.to eq(false) }
end
end
+
+ describe 'confidentiality' do
+ let_it_be(:project) { create(:project) }
+
+ context 'when parent and child are confidential' do
+ let_it_be(:parent) { create(:work_item, confidential: true, project: project) }
+ let_it_be(:child) { create(:work_item, :task, confidential: true, project: project) }
+ let_it_be(:link) { create(:parent_link, work_item: child, work_item_parent: parent) }
+
+ it 'does not allow making the child non-confidential' do
+ issue = Issue.find(child.id)
+ issue.confidential = false
+
+ expect(issue).not_to be_valid
+ expect(issue.errors[:confidential])
+ .to include('associated parent is confidential and can not have non-confidential children.')
+ end
+
+ it 'allows making the parent non-confidential' do
+ issue = Issue.find(parent.id)
+ issue.confidential = false
+
+ expect(issue).to be_valid
+ end
+ end
+
+ context 'when parent and child are non-confidential' do
+ let_it_be(:parent) { create(:work_item, project: project) }
+ let_it_be(:child) { create(:work_item, :task, project: project) }
+ let_it_be(:link) { create(:parent_link, work_item: child, work_item_parent: parent) }
+
+ it 'does not allow making the parent confidential' do
+ issue = Issue.find(parent.id)
+ issue.confidential = true
+
+ expect(issue).not_to be_valid
+ expect(issue.errors[:confidential])
+ .to include('confidential parent can not be used if there are non-confidential children.')
+ end
+
+ it 'allows making the child confidential' do
+ issue = Issue.find(child.id)
+ issue.confidential = true
+
+ expect(issue).to be_valid
+ end
+ end
+ end
end
subject { create(:issue, project: reusable_project) }
@@ -124,6 +222,61 @@ RSpec.describe Issue do
end
end
+ describe '#ensure_work_item_type' do
+ let_it_be(:issue_type) { create(:work_item_type, :issue, :default) }
+ let_it_be(:task_type) { create(:work_item_type, :task, :default) }
+ let_it_be(:project) { create(:project) }
+
+ context 'when a type was already set' do
+ let_it_be(:issue, refind: true) { create(:issue, project: project) }
+
+ it 'does not fetch a work item type from the DB' do
+ expect(issue.work_item_type_id).to eq(issue_type.id)
+ expect(WorkItems::Type).not_to receive(:default_by_type)
+
+ expect(issue).to be_valid
+ end
+
+ it 'does not fetch a work item type from the DB when updating the type' do
+ expect(issue.work_item_type_id).to eq(issue_type.id)
+ expect(WorkItems::Type).not_to receive(:default_by_type)
+
+ issue.update!(work_item_type: task_type, issue_type: 'task')
+
+ expect(issue.work_item_type_id).to eq(task_type.id)
+ end
+
+ it 'ensures a work item type if updated to nil' do
+ expect(issue.work_item_type_id).to eq(issue_type.id)
+
+ expect do
+ issue.update!(work_item_type: nil)
+ end.to not_change(issue, :work_item_type).from(issue_type)
+ end
+ end
+
+ context 'when no type was set' do
+ let_it_be(:issue, refind: true) { build(:issue, project: project, work_item_type: nil).tap { |issue| issue.save!(validate: false) } }
+
+ it 'sets a work item type before validation' do
+ expect(issue.work_item_type_id).to be_nil
+
+ issue.save!
+
+ expect(issue.work_item_type_id).to eq(issue_type.id)
+ end
+
+ it 'does not fetch type from DB if provided during update' do
+ expect(issue.work_item_type_id).to be_nil
+ expect(WorkItems::Type).not_to receive(:default_by_type)
+
+ issue.update!(work_item_type: task_type, issue_type: 'task')
+
+ expect(issue.work_item_type_id).to eq(task_type.id)
+ end
+ end
+ end
+
describe '#record_create_action' do
it 'records the creation action after saving' do
expect(Gitlab::UsageDataCounters::IssueActivityUniqueCounter).to receive(:track_issue_created_action)
@@ -289,7 +442,7 @@ RSpec.describe Issue do
# TODO: Remove when NOT NULL constraint is added to the relationship
describe '#work_item_type' do
- let(:issue) { create(:issue, :incident, project: reusable_project, work_item_type: nil) }
+ let(:issue) { build(:issue, :incident, project: reusable_project, work_item_type: nil).tap { |issue| issue.save!(validate: false) } }
it 'returns a default type if the legacy issue does not have a work item type associated yet' do
expect(issue.work_item_type_id).to be_nil
@@ -493,7 +646,7 @@ RSpec.describe Issue do
let_it_be(:authorized_issue_a) { create(:issue, project: authorized_project) }
let_it_be(:authorized_issue_b) { create(:issue, project: authorized_project) }
let_it_be(:authorized_issue_c) { create(:issue, project: authorized_project2) }
- let_it_be(:authorized_incident_a) { create(:incident, project: authorized_project )}
+ let_it_be(:authorized_incident_a) { create(:incident, project: authorized_project ) }
let_it_be(:unauthorized_issue) { create(:issue, project: unauthorized_project) }
@@ -550,7 +703,7 @@ RSpec.describe Issue do
subject { issue.can_move?(user) }
context 'user is not a member of project issue belongs to' do
- it { is_expected.to eq false}
+ it { is_expected.to eq false }
end
context 'user is reporter in project issue belongs to' do
@@ -1074,7 +1227,7 @@ RSpec.describe Issue do
end
context 'when issue is moved to a private project' do
- let(:private_project) { build(:project, :private)}
+ let(:private_project) { build(:project, :private) }
before do
issue.update!(project: private_project) # move issue to private project
@@ -1621,4 +1774,20 @@ RSpec.describe Issue do
end
end
end
+
+ describe '#full_search' do
+ context 'when searching non-english terms' do
+ [
+ 'abc 中文語',
+ '中文語cn',
+ '中文語'
+ ].each do |term|
+ it 'adds extra where clause to match partial index' do
+ expect(described_class.full_search(term).to_sql).to include(
+ "AND (issues.title NOT SIMILAR TO '[\\u0000-\\u218F]*' OR issues.description NOT SIMILAR TO '[\\u0000-\\u218F]*')"
+ )
+ end
+ end
+ end
+ end
end
diff --git a/spec/models/jira_import_state_spec.rb b/spec/models/jira_import_state_spec.rb
index a272d001429..95e9594f885 100644
--- a/spec/models/jira_import_state_spec.rb
+++ b/spec/models/jira_import_state_spec.rb
@@ -25,25 +25,25 @@ RSpec.describe JiraImportState do
let(:project) { create(:project) }
context 'when project has an initial jira_import' do
- let!(:jira_import) { create(:jira_import_state, project: project)}
+ let!(:jira_import) { create(:jira_import_state, project: project) }
it_behaves_like 'multiple running imports not allowed'
end
context 'when project has a scheduled jira_import' do
- let!(:jira_import) { create(:jira_import_state, :scheduled, project: project)}
+ let!(:jira_import) { create(:jira_import_state, :scheduled, project: project) }
it_behaves_like 'multiple running imports not allowed'
end
context 'when project has a started jira_import' do
- let!(:jira_import) { create(:jira_import_state, :started, project: project)}
+ let!(:jira_import) { create(:jira_import_state, :started, project: project) }
it_behaves_like 'multiple running imports not allowed'
end
context 'when project has a failed jira_import' do
- let!(:jira_import) { create(:jira_import_state, :failed, project: project)}
+ let!(:jira_import) { create(:jira_import_state, :failed, project: project) }
it 'returns valid' do
new_import = build(:jira_import_state, project: project)
@@ -54,7 +54,7 @@ RSpec.describe JiraImportState do
end
context 'when project has a finished jira_import' do
- let!(:jira_import) { create(:jira_import_state, :finished, project: project)}
+ let!(:jira_import) { create(:jira_import_state, :finished, project: project) }
it 'returns valid' do
new_import = build(:jira_import_state, project: project)
@@ -83,40 +83,40 @@ RSpec.describe JiraImportState do
let(:project) { create(:project) }
context 'when jira import is in initial state' do
- let!(:jira_import) { build(:jira_import_state, project: project)}
+ let!(:jira_import) { build(:jira_import_state, project: project) }
it_behaves_like 'can transition', [:schedule, :do_fail]
it_behaves_like 'cannot transition', [:start, :finish]
end
context 'when jira import is in scheduled state' do
- let!(:jira_import) { build(:jira_import_state, :scheduled, project: project)}
+ let!(:jira_import) { build(:jira_import_state, :scheduled, project: project) }
it_behaves_like 'can transition', [:start, :do_fail]
it_behaves_like 'cannot transition', [:finish]
end
context 'when jira import is in started state' do
- let!(:jira_import) { build(:jira_import_state, :started, project: project)}
+ let!(:jira_import) { build(:jira_import_state, :started, project: project) }
it_behaves_like 'can transition', [:finish, :do_fail]
it_behaves_like 'cannot transition', [:schedule]
end
context 'when jira import is in failed state' do
- let!(:jira_import) { build(:jira_import_state, :failed, project: project)}
+ let!(:jira_import) { build(:jira_import_state, :failed, project: project) }
it_behaves_like 'cannot transition', [:schedule, :finish, :do_fail]
end
context 'when jira import is in finished state' do
- let!(:jira_import) { build(:jira_import_state, :finished, project: project)}
+ let!(:jira_import) { build(:jira_import_state, :finished, project: project) }
it_behaves_like 'cannot transition', [:schedule, :do_fail, :start]
end
context 'after transition to scheduled' do
- let!(:jira_import) { build(:jira_import_state, project: project)}
+ let!(:jira_import) { build(:jira_import_state, project: project) }
it 'triggers the import job' do
expect(Gitlab::JiraImport::Stage::StartImportWorker).to receive(:perform_async).and_return('some-job-id')
@@ -129,7 +129,7 @@ RSpec.describe JiraImportState do
end
context 'after transition to finished' do
- let!(:jira_import) { build(:jira_import_state, :started, jid: 'some-other-jid', project: project)}
+ let!(:jira_import) { build(:jira_import_state, :started, jid: 'some-other-jid', project: project) }
subject { jira_import.finish }
@@ -172,7 +172,7 @@ RSpec.describe JiraImportState do
end
context 'when jira import has no error_message' do
- let(:jira_import) { build(:jira_import_state, project: project)}
+ let(:jira_import) { build(:jira_import_state, project: project) }
it 'does not run the callback', :aggregate_failures do
expect { jira_import.save! }.to change { JiraImportState.count }.by(1)
@@ -181,7 +181,7 @@ RSpec.describe JiraImportState do
end
context 'when jira import error_message does not exceed the limit' do
- let(:jira_import) { build(:jira_import_state, project: project, error_message: 'error')}
+ let(:jira_import) { build(:jira_import_state, project: project, error_message: 'error') }
it 'does not run the callback', :aggregate_failures do
expect { jira_import.save! }.to change { JiraImportState.count }.by(1)
@@ -190,7 +190,7 @@ RSpec.describe JiraImportState do
end
context 'when error_message exceeds limit' do
- let(:jira_import) { build(:jira_import_state, project: project, error_message: 'error message longer than the limit')}
+ let(:jira_import) { build(:jira_import_state, project: project, error_message: 'error message longer than the limit') }
it 'truncates error_message to the limit', :aggregate_failures do
expect { jira_import.save! }.to change { JiraImportState.count }.by(1)
diff --git a/spec/models/lfs_object_spec.rb b/spec/models/lfs_object_spec.rb
index 5210709a468..c25d0451f18 100644
--- a/spec/models/lfs_object_spec.rb
+++ b/spec/models/lfs_object_spec.rb
@@ -193,9 +193,9 @@ RSpec.describe LfsObject do
end
describe '.unreferenced_in_batches' do
- let!(:unreferenced_lfs_object1) { create(:lfs_object, oid: '1') }
+ let!(:unreferenced_lfs_object1) { create(:lfs_object, oid: '1' * 64) }
let!(:referenced_lfs_object) { create(:lfs_objects_project).lfs_object }
- let!(:unreferenced_lfs_object2) { create(:lfs_object, oid: '2') }
+ let!(:unreferenced_lfs_object2) { create(:lfs_object, oid: '2' * 64) }
it 'returns lfs objects in batches' do
stub_const('LfsObject::BATCH_SIZE', 1)
diff --git a/spec/models/loose_foreign_keys/deleted_record_spec.rb b/spec/models/loose_foreign_keys/deleted_record_spec.rb
index 23e0ed1f39d..9ee5b7340f3 100644
--- a/spec/models/loose_foreign_keys/deleted_record_spec.rb
+++ b/spec/models/loose_foreign_keys/deleted_record_spec.rb
@@ -94,14 +94,6 @@ RSpec.describe LooseForeignKeys::DeletedRecord, type: :model do
end
it { is_expected.to eq(true) }
-
- context 'when the lfk_automatic_partition_creation FF is off' do
- before do
- stub_feature_flags(lfk_automatic_partition_creation: false)
- end
-
- it { is_expected.to eq(false) }
- end
end
end
@@ -126,14 +118,6 @@ RSpec.describe LooseForeignKeys::DeletedRecord, type: :model do
end
it { is_expected.to eq(true) }
-
- context 'when the lfk_automatic_partition_dropping FF is off' do
- before do
- stub_feature_flags(lfk_automatic_partition_dropping: false)
- end
-
- it { is_expected.to eq(false) }
- end
end
end
diff --git a/spec/models/member_spec.rb b/spec/models/member_spec.rb
index 286167c918f..2716244b7f3 100644
--- a/spec/models/member_spec.rb
+++ b/spec/models/member_spec.rb
@@ -10,6 +10,7 @@ RSpec.describe Member do
describe 'Associations' do
it { is_expected.to belong_to(:user) }
it { is_expected.to belong_to(:member_namespace) }
+ it { is_expected.to belong_to(:member_role) }
it { is_expected.to have_one(:member_task) }
end
@@ -166,6 +167,36 @@ RSpec.describe Member do
end
end
end
+
+ context 'member role access level' do
+ let_it_be(:member) { create(:group_member, access_level: Gitlab::Access::DEVELOPER) }
+
+ context 'no member role is associated' do
+ it 'is valid' do
+ expect(member).to be_valid
+ end
+ end
+
+ context 'member role is associated' do
+ let_it_be(:member_role) do
+ create(:member_role, members: [member])
+ end
+
+ context 'member role matches access level' do
+ it 'is valid' do
+ expect(member).to be_valid
+ end
+ end
+
+ context 'member role does not match access level' do
+ it 'is invalid' do
+ member_role.base_access_level = Gitlab::Access::MAINTAINER
+
+ expect(member).not_to be_valid
+ end
+ end
+ end
+ end
end
describe 'Scopes & finders' do
diff --git a/spec/models/members/group_member_spec.rb b/spec/models/members/group_member_spec.rb
index 94032146f51..c6266f15340 100644
--- a/spec/models/members/group_member_spec.rb
+++ b/spec/models/members/group_member_spec.rb
@@ -165,78 +165,28 @@ RSpec.describe GroupMember do
let_it_be(:project_b) { create(:project, group: group) }
let_it_be(:project_c) { create(:project, group: group) }
let_it_be(:user) { create(:user) }
- let_it_be(:affected_project_ids) { Project.id_in([project_a, project_b, project_c]).ids }
- before do
- stub_const(
- "#{described_class.name}::THRESHOLD_FOR_REFRESHING_AUTHORIZATIONS_VIA_PROJECTS",
- affected_project_ids.size - 1)
- end
-
- shared_examples_for 'calls UserProjectAccessChangedService to recalculate authorizations' do
- it 'calls UserProjectAccessChangedService to recalculate authorizations' do
- expect_next_instance_of(UserProjectAccessChangedService, user.id) do |service|
- expect(service).to receive(:execute).with(blocking: blocking)
- end
+ shared_examples_for 'calls AuthorizedProjectsWorker inline to recalculate authorizations' do
+ # this is in line with the overridden behaviour in stubbed_member.rb
+ it 'calls AuthorizedProjectsWorker inline to recalculate authorizations' do
+ worker_instance = AuthorizedProjectsWorker.new
+ expect(AuthorizedProjectsWorker).to receive(:new).and_return(worker_instance)
+ expect(worker_instance).to receive(:perform).with(user.id)
action
end
end
- shared_examples_for 'tries to update permissions via refreshing authorizations for the affected projects' do
- context 'when the number of affected projects exceeds the set threshold' do
- it 'updates permissions via refreshing authorizations for the affected projects asynchronously' do
- expect_next_instance_of(
- AuthorizedProjectUpdate::ProjectAccessChangedService, affected_project_ids
- ) do |service|
- expect(service).to receive(:execute).with(blocking: false)
- end
-
- action
- end
-
- it 'calls AuthorizedProjectUpdate::UserRefreshFromReplicaWorker with a delay as a safety net' do
- expect(AuthorizedProjectUpdate::UserRefreshFromReplicaWorker).to(
- receive(:bulk_perform_in)
- .with(1.hour,
- [[user.id]],
- batch_delay: 30.seconds, batch_size: 100)
- )
-
- action
- end
- end
-
- context 'when the number of affected projects does not exceed the set threshold' do
- before do
- stub_const(
- "#{described_class.name}::THRESHOLD_FOR_REFRESHING_AUTHORIZATIONS_VIA_PROJECTS",
- affected_project_ids.size + 1)
- end
-
- it_behaves_like 'calls UserProjectAccessChangedService to recalculate authorizations'
- end
- end
-
context 'on create' do
let(:action) { group.add_member(user, Gitlab::Access::GUEST) }
- let(:blocking) { true }
- it 'changes access level', :sidekiq_inline do
+ it 'changes access level' do
expect { action }.to change { user.can?(:guest_access, project_a) }.from(false).to(true)
.and change { user.can?(:guest_access, project_b) }.from(false).to(true)
.and change { user.can?(:guest_access, project_c) }.from(false).to(true)
end
- it_behaves_like 'tries to update permissions via refreshing authorizations for the affected projects'
-
- context 'when the feature flag `refresh_authorizations_via_affected_projects_on_group_membership` is disabled' do
- before do
- stub_feature_flags(refresh_authorizations_via_affected_projects_on_group_membership: false)
- end
-
- it_behaves_like 'calls UserProjectAccessChangedService to recalculate authorizations'
- end
+ it_behaves_like 'calls AuthorizedProjectsWorker inline to recalculate authorizations'
end
context 'on update' do
@@ -245,23 +195,14 @@ RSpec.describe GroupMember do
end
let(:action) { group.members.find_by(user: user).update!(access_level: Gitlab::Access::DEVELOPER) }
- let(:blocking) { true }
- it 'changes access level', :sidekiq_inline do
+ it 'changes access level' do
expect { action }.to change { user.can?(:developer_access, project_a) }.from(false).to(true)
.and change { user.can?(:developer_access, project_b) }.from(false).to(true)
.and change { user.can?(:developer_access, project_c) }.from(false).to(true)
end
- it_behaves_like 'tries to update permissions via refreshing authorizations for the affected projects'
-
- context 'when the feature flag `refresh_authorizations_via_affected_projects_on_group_membership` is disabled' do
- before do
- stub_feature_flags(refresh_authorizations_via_affected_projects_on_group_membership: false)
- end
-
- it_behaves_like 'calls UserProjectAccessChangedService to recalculate authorizations'
- end
+ it_behaves_like 'calls AuthorizedProjectsWorker inline to recalculate authorizations'
end
context 'on destroy' do
@@ -270,7 +211,6 @@ RSpec.describe GroupMember do
end
let(:action) { group.members.find_by(user: user).destroy! }
- let(:blocking) { false }
it 'changes access level', :sidekiq_inline do
expect { action }.to change { user.can?(:guest_access, project_a) }.from(true).to(false)
@@ -278,14 +218,10 @@ RSpec.describe GroupMember do
.and change { user.can?(:guest_access, project_c) }.from(true).to(false)
end
- it_behaves_like 'tries to update permissions via refreshing authorizations for the affected projects'
+ it 'schedules an AuthorizedProjectsWorker job to recalculate authorizations' do
+ expect(AuthorizedProjectsWorker).to receive(:bulk_perform_async).with([[user.id]])
- context 'when the feature flag `refresh_authorizations_via_affected_projects_on_group_membership` is disabled' do
- before do
- stub_feature_flags(refresh_authorizations_via_affected_projects_on_group_membership: false)
- end
-
- it_behaves_like 'calls UserProjectAccessChangedService to recalculate authorizations'
+ action
end
end
end
diff --git a/spec/models/members/member_role_spec.rb b/spec/models/members/member_role_spec.rb
new file mode 100644
index 00000000000..e8993491918
--- /dev/null
+++ b/spec/models/members/member_role_spec.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe MemberRole do
+ describe 'associations' do
+ it { is_expected.to belong_to(:namespace) }
+ it { is_expected.to have_many(:members) }
+ end
+
+ describe 'validation' do
+ subject { described_class.new }
+
+ it { is_expected.to validate_presence_of(:namespace_id) }
+ it { is_expected.to validate_presence_of(:base_access_level) }
+ end
+end
diff --git a/spec/models/members/project_member_spec.rb b/spec/models/members/project_member_spec.rb
index 39d9d25a98c..99fc5dc14df 100644
--- a/spec/models/members/project_member_spec.rb
+++ b/spec/models/members/project_member_spec.rb
@@ -213,10 +213,11 @@ RSpec.describe ProjectMember do
let_it_be(:user) { create(:user) }
shared_examples_for 'calls AuthorizedProjectUpdate::ProjectRecalculatePerUserWorker inline to recalculate authorizations' do
- it 'calls AuthorizedProjectUpdate::ProjectRecalculatePerUserWorker' do
- expect(AuthorizedProjectUpdate::ProjectRecalculatePerUserWorker).to receive(:bulk_perform_and_wait).with(
- [[project.id, user.id]]
- )
+ # this is in line with the overridden behaviour in stubbed_member.rb
+ it 'calls AuthorizedProjectUpdate::ProjectRecalculatePerUserWorker inline' do
+ worker_instance = AuthorizedProjectUpdate::ProjectRecalculatePerUserWorker.new
+ expect(AuthorizedProjectUpdate::ProjectRecalculatePerUserWorker).to receive(:new).and_return(worker_instance)
+ expect(worker_instance).to receive(:perform).with(project.id, user.id)
action
end
diff --git a/spec/models/merge_request/approval_removal_settings_spec.rb b/spec/models/merge_request/approval_removal_settings_spec.rb
new file mode 100644
index 00000000000..5f879207a72
--- /dev/null
+++ b/spec/models/merge_request/approval_removal_settings_spec.rb
@@ -0,0 +1,52 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe MergeRequest::ApprovalRemovalSettings do
+ describe 'validations' do
+ let(:reset_approvals_on_push) {}
+ let(:selective_code_owner_removals) {}
+
+ subject { described_class.new(project, reset_approvals_on_push, selective_code_owner_removals) }
+
+ context 'when enabling selective_code_owner_removals and reset_approvals_on_push is disabled' do
+ let(:project) { create(:project, reset_approvals_on_push: false) }
+ let(:selective_code_owner_removals) { true }
+
+ it { is_expected.to be_valid }
+ end
+
+ context 'when enabling selective_code_owner_removals and reset_approvals_on_push is enabled' do
+ let(:project) { create(:project) }
+ let(:selective_code_owner_removals) { true }
+
+ it { is_expected.not_to be_valid }
+ end
+
+ context 'when enabling reset_approvals_on_push and selective_code_owner_removals is disabled' do
+ let(:project) { create(:project) }
+ let(:reset_approvals_on_push) { true }
+
+ it { is_expected.to be_valid }
+ end
+
+ context 'when enabling reset_approvals_on_push and selective_code_owner_removals is enabled' do
+ let(:project) { create(:project) }
+ let(:reset_approvals_on_push) { true }
+
+ before do
+ project.project_setting.update!(selective_code_owner_removals: true)
+ end
+
+ it { is_expected.not_to be_valid }
+ end
+
+ context 'when enabling reset_approvals_on_push and selective_code_owner_removals' do
+ let(:project) { create(:project) }
+ let(:reset_approvals_on_push) { true }
+ let(:selective_code_owner_removals) { true }
+
+ it { is_expected.not_to be_valid }
+ end
+ end
+end
diff --git a/spec/models/merge_request_spec.rb b/spec/models/merge_request_spec.rb
index c3e325c4e6c..19026a4772d 100644
--- a/spec/models/merge_request_spec.rb
+++ b/spec/models/merge_request_spec.rb
@@ -114,10 +114,10 @@ RSpec.describe MergeRequest, factory_default: :keep do
let_it_be(:user1) { create(:user) }
let_it_be(:user2) { create(:user) }
- let_it_be(:merge_request1) { create(:merge_request, :unique_branches, reviewers: [user1])}
- let_it_be(:merge_request2) { create(:merge_request, :unique_branches, reviewers: [user2])}
- let_it_be(:merge_request3) { create(:merge_request, :unique_branches, reviewers: [])}
- let_it_be(:merge_request4) { create(:merge_request, :draft_merge_request)}
+ let_it_be(:merge_request1) { create(:merge_request, :unique_branches, reviewers: [user1]) }
+ let_it_be(:merge_request2) { create(:merge_request, :unique_branches, reviewers: [user2]) }
+ let_it_be(:merge_request3) { create(:merge_request, :unique_branches, reviewers: []) }
+ let_it_be(:merge_request4) { create(:merge_request, :draft_merge_request) }
describe '.review_requested' do
it 'returns MRs that have any review requests' do
@@ -145,8 +145,8 @@ RSpec.describe MergeRequest, factory_default: :keep do
end
describe '.attention' do
- let_it_be(:merge_request5) { create(:merge_request, :unique_branches, assignees: [user2])}
- let_it_be(:merge_request6) { create(:merge_request, :unique_branches, assignees: [user2])}
+ let_it_be(:merge_request5) { create(:merge_request, :unique_branches, assignees: [user2]) }
+ let_it_be(:merge_request6) { create(:merge_request, :unique_branches, assignees: [user2]) }
before do
assignee = merge_request6.find_assignee(user2)
@@ -2056,7 +2056,7 @@ RSpec.describe MergeRequest, factory_default: :keep do
context 'when failed to find an actual head pipeline' do
before do
- allow(merge_request).to receive(:find_actual_head_pipeline) { }
+ allow(merge_request).to receive(:find_actual_head_pipeline) {}
end
it 'does not update the current head pipeline' do
@@ -3232,6 +3232,62 @@ RSpec.describe MergeRequest, factory_default: :keep do
end
end
+ describe '#detailed_merge_status' do
+ subject(:detailed_merge_status) { merge_request.detailed_merge_status }
+
+ context 'when merge status is cannot_be_merged_rechecking' do
+ let(:merge_request) { create(:merge_request, merge_status: :cannot_be_merged_rechecking) }
+
+ it 'returns :checking' do
+ expect(detailed_merge_status).to eq(:checking)
+ end
+ end
+
+ context 'when merge status is preparing' do
+ let(:merge_request) { create(:merge_request, merge_status: :preparing) }
+
+ it 'returns :checking' do
+ expect(detailed_merge_status).to eq(:checking)
+ end
+ end
+
+ context 'when merge status is checking' do
+ let(:merge_request) { create(:merge_request, merge_status: :checking) }
+
+ it 'returns :checking' do
+ expect(detailed_merge_status).to eq(:checking)
+ end
+ end
+
+ context 'when merge status is unchecked' do
+ let(:merge_request) { create(:merge_request, merge_status: :unchecked) }
+
+ it 'returns :unchecked' do
+ expect(detailed_merge_status).to eq(:unchecked)
+ end
+ end
+
+ context 'when merge checks are a success' do
+ let(:merge_request) { create(:merge_request) }
+
+ it 'returns :mergeable' do
+ expect(detailed_merge_status).to eq(:mergeable)
+ end
+ end
+
+ context 'when merge checks have a failure' do
+ let(:merge_request) { create(:merge_request) }
+
+ before do
+ merge_request.close!
+ end
+
+ it 'returns the failure reason' do
+ expect(detailed_merge_status).to eq(:not_open)
+ end
+ end
+ end
+
describe '#mergeable_state?' do
it_behaves_like 'for mergeable_state'
@@ -4660,6 +4716,37 @@ RSpec.describe MergeRequest, factory_default: :keep do
end
end
+ describe '#in_locked_state' do
+ let(:merge_request) { create(:merge_request, :opened) }
+
+ context 'when the merge request does not change state' do
+ it 'returns to previous state and has no errors on the object' do
+ expect(merge_request.opened?).to eq(true)
+
+ merge_request.in_locked_state do
+ expect(merge_request.locked?).to eq(true)
+ end
+
+ expect(merge_request.opened?).to eq(true)
+ expect(merge_request.errors).to be_empty
+ end
+ end
+
+ context 'when the merge request is merged while locked' do
+ it 'becomes merged and has no errors on the object' do
+ expect(merge_request.opened?).to eq(true)
+
+ merge_request.in_locked_state do
+ expect(merge_request.locked?).to eq(true)
+ merge_request.mark_as_merged!
+ end
+
+ expect(merge_request.merged?).to eq(true)
+ expect(merge_request.errors).to be_empty
+ end
+ end
+ end
+
describe '#cleanup_refs' do
subject { merge_request.cleanup_refs(only: only) }
@@ -5047,6 +5134,12 @@ RSpec.describe MergeRequest, factory_default: :keep do
end
end
+ describe '#merge_blocked_by_other_mrs?' do
+ it 'returns false when there are no blocking merge requests' do
+ expect(subject.merge_blocked_by_other_mrs?).to be_falsy
+ end
+ end
+
describe '#merge_request_reviewers_with' do
let_it_be(:reviewer1) { create(:user) }
let_it_be(:reviewer2) { create(:user) }
diff --git a/spec/models/milestone_spec.rb b/spec/models/milestone_spec.rb
index 72a57b6076a..af1383b68bf 100644
--- a/spec/models/milestone_spec.rb
+++ b/spec/models/milestone_spec.rb
@@ -257,7 +257,7 @@ RSpec.describe Milestone do
let(:milestone) { create(:milestone, title: 'foo', description: 'bar') }
it 'returns milestones with a matching title' do
- expect(described_class.search_title(milestone.title)) .to eq([milestone])
+ expect(described_class.search_title(milestone.title)).to eq([milestone])
end
it 'returns milestones with a partially matching title' do
@@ -272,7 +272,7 @@ RSpec.describe Milestone do
it 'searches only on the title and ignores milestones with a matching description' do
create(:milestone, title: 'bar', description: 'foo')
- expect(described_class.search_title(milestone.title)) .to eq([milestone])
+ expect(described_class.search_title(milestone.title)).to eq([milestone])
end
end
diff --git a/spec/models/ml/candidate_metric_spec.rb b/spec/models/ml/candidate_metric_spec.rb
new file mode 100644
index 00000000000..5ee6030fb8e
--- /dev/null
+++ b/spec/models/ml/candidate_metric_spec.rb
@@ -0,0 +1,9 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ml::CandidateMetric do
+ describe 'associations' do
+ it { is_expected.to belong_to(:candidate) }
+ end
+end
diff --git a/spec/models/ml/candidate_param_spec.rb b/spec/models/ml/candidate_param_spec.rb
new file mode 100644
index 00000000000..ff38e471219
--- /dev/null
+++ b/spec/models/ml/candidate_param_spec.rb
@@ -0,0 +1,9 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ml::CandidateParam do
+ describe 'associations' do
+ it { is_expected.to belong_to(:candidate) }
+ end
+end
diff --git a/spec/models/ml/candidate_spec.rb b/spec/models/ml/candidate_spec.rb
new file mode 100644
index 00000000000..a48e291fa55
--- /dev/null
+++ b/spec/models/ml/candidate_spec.rb
@@ -0,0 +1,12 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ml::Candidate do
+ describe 'associations' do
+ it { is_expected.to belong_to(:experiment) }
+ it { is_expected.to belong_to(:user) }
+ it { is_expected.to have_many(:params) }
+ it { is_expected.to have_many(:metrics) }
+ end
+end
diff --git a/spec/models/ml/experiment_spec.rb b/spec/models/ml/experiment_spec.rb
new file mode 100644
index 00000000000..dca5280a8fe
--- /dev/null
+++ b/spec/models/ml/experiment_spec.rb
@@ -0,0 +1,11 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ml::Experiment do
+ describe 'associations' do
+ it { is_expected.to belong_to(:project) }
+ it { is_expected.to belong_to(:user) }
+ it { is_expected.to have_many(:candidates) }
+ end
+end
diff --git a/spec/models/namespace/detail_spec.rb b/spec/models/namespace/detail_spec.rb
new file mode 100644
index 00000000000..1bb756c441b
--- /dev/null
+++ b/spec/models/namespace/detail_spec.rb
@@ -0,0 +1,40 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Namespace::Detail, type: :model do
+ describe 'associations' do
+ it { is_expected.to belong_to :namespace }
+ end
+
+ describe 'validations' do
+ it { is_expected.to validate_presence_of(:namespace) }
+ end
+
+ context 'when namespace description changes' do
+ let(:namespace) { create(:namespace, description: "old") }
+
+ it 'changes namespace details description' do
+ expect { namespace.update!(description: "new") }
+ .to change { namespace.namespace_details.description }.from("old").to("new")
+ end
+ end
+
+ context 'when project description changes' do
+ let(:project) { create(:project, description: "old") }
+
+ it 'changes project namespace details description' do
+ expect { project.update!(description: "new") }
+ .to change { project.project_namespace.namespace_details.description }.from("old").to("new")
+ end
+ end
+
+ context 'when group description changes' do
+ let(:group) { create(:group, description: "old") }
+
+ it 'changes group namespace details description' do
+ expect { group.update!(description: "new") }
+ .to change { group.namespace_details.description }.from("old").to("new")
+ end
+ end
+end
diff --git a/spec/models/namespace/root_storage_statistics_spec.rb b/spec/models/namespace/root_storage_statistics_spec.rb
index d2ee0b40ed6..14ac08b545a 100644
--- a/spec/models/namespace/root_storage_statistics_spec.rb
+++ b/spec/models/namespace/root_storage_statistics_spec.rb
@@ -100,8 +100,8 @@ RSpec.describe Namespace::RootStorageStatistics, type: :model do
it_behaves_like 'does not include personal snippets'
context 'with subgroups' do
- let(:subgroup1) { create(:group, parent: namespace)}
- let(:subgroup2) { create(:group, parent: subgroup1)}
+ let(:subgroup1) { create(:group, parent: namespace) }
+ let(:subgroup2) { create(:group, parent: subgroup1) }
let(:project1) { create(:project, namespace: subgroup1) }
let(:project2) { create(:project, namespace: subgroup2) }
diff --git a/spec/models/namespace/traversal_hierarchy_spec.rb b/spec/models/namespace/traversal_hierarchy_spec.rb
index 51932ab943c..918ff6aa154 100644
--- a/spec/models/namespace/traversal_hierarchy_spec.rb
+++ b/spec/models/namespace/traversal_hierarchy_spec.rb
@@ -85,7 +85,11 @@ RSpec.describe Namespace::TraversalHierarchy, type: :model do
it { expect { subject }.to raise_error(ActiveRecord::Deadlocked) }
it 'increment db_deadlock counter' do
- expect { subject rescue nil }.to change { db_deadlock_total('Namespace#sync_traversal_ids!') }.by(1)
+ expect do
+ subject
+ rescue StandardError
+ nil
+ end.to change { db_deadlock_total('Namespace#sync_traversal_ids!') }.by(1)
end
end
end
diff --git a/spec/models/namespace_spec.rb b/spec/models/namespace_spec.rb
index 664cdb27290..71ce3afda44 100644
--- a/spec/models/namespace_spec.rb
+++ b/spec/models/namespace_spec.rb
@@ -4,7 +4,6 @@ require 'spec_helper'
RSpec.describe Namespace do
include ProjectForksHelper
- include GitHelpers
include ReloadHelpers
let_it_be(:group_sti_name) { Group.sti_name }
@@ -23,6 +22,7 @@ RSpec.describe Namespace do
it { is_expected.to have_one :root_storage_statistics }
it { is_expected.to have_one :aggregation_schedule }
it { is_expected.to have_one :namespace_settings }
+ it { is_expected.to have_one :namespace_details }
it { is_expected.to have_one(:namespace_statistics) }
it { is_expected.to have_many :custom_emoji }
it { is_expected.to have_one :package_setting_relation }
@@ -31,6 +31,7 @@ RSpec.describe Namespace do
it { is_expected.to have_many :pending_builds }
it { is_expected.to have_one :namespace_route }
it { is_expected.to have_many :namespace_members }
+ it { is_expected.to have_many :member_roles }
it { is_expected.to have_one :cluster_enabled_grant }
it { is_expected.to have_many(:work_items) }
@@ -373,14 +374,6 @@ RSpec.describe Namespace do
context 'linear' do
it_behaves_like 'namespace traversal scopes'
-
- context 'without inner join ancestors query' do
- before do
- stub_feature_flags(use_traversal_ids_for_ancestor_scopes_with_inner_join: false)
- end
-
- it_behaves_like 'namespace traversal scopes'
- end
end
shared_examples 'makes recursive queries' do
@@ -1075,9 +1068,9 @@ RSpec.describe Namespace do
it 'updates project full path in .git/config' do
parent.update!(path: 'mygroup_new')
- expect(project_rugged(project_in_parent_group).config['gitlab.fullpath']).to eq "mygroup_new/#{project_in_parent_group.path}"
- expect(project_rugged(hashed_project_in_subgroup).config['gitlab.fullpath']).to eq "mygroup_new/mysubgroup/#{hashed_project_in_subgroup.path}"
- expect(project_rugged(legacy_project_in_subgroup).config['gitlab.fullpath']).to eq "mygroup_new/mysubgroup/#{legacy_project_in_subgroup.path}"
+ expect(project_in_parent_group.reload.repository.full_path).to eq "mygroup_new/#{project_in_parent_group.path}"
+ expect(hashed_project_in_subgroup.reload.repository.full_path).to eq "mygroup_new/mysubgroup/#{hashed_project_in_subgroup.path}"
+ expect(legacy_project_in_subgroup.reload.repository.full_path).to eq "mygroup_new/mysubgroup/#{legacy_project_in_subgroup.path}"
end
it 'updates the project storage location' do
@@ -1091,14 +1084,6 @@ RSpec.describe Namespace do
expect(repository_hashed_project_in_subgroup.reload.disk_path).to eq hashed_project_in_subgroup.disk_path
expect(repository_legacy_project_in_subgroup.reload.disk_path).to eq "mygroup_moved/mysubgroup/#{legacy_project_in_subgroup.path}"
end
-
- def project_rugged(project)
- # Routes are loaded when creating the projects, so we need to manually
- # reload them for the below code to be aware of the above UPDATE.
- project.route.reload
-
- rugged_repo(project.repository)
- end
end
end
@@ -1556,7 +1541,7 @@ RSpec.describe Namespace do
describe '#share_with_group_lock with subgroups' do
context 'when creating a subgroup' do
- let(:subgroup) { create(:group, parent: root_group )}
+ let(:subgroup) { create(:group, parent: root_group ) }
context 'under a parent with "Share with group lock" enabled' do
let(:root_group) { create(:group, share_with_group_lock: true) }
@@ -1577,7 +1562,7 @@ RSpec.describe Namespace do
context 'when enabling the parent group "Share with group lock"' do
let(:root_group) { create(:group) }
- let!(:subgroup) { create(:group, parent: root_group )}
+ let!(:subgroup) { create(:group, parent: root_group ) }
it 'the subgroup "Share with group lock" becomes enabled' do
root_group.update!(share_with_group_lock: true)
@@ -1590,7 +1575,7 @@ RSpec.describe Namespace do
let(:root_group) { create(:group, share_with_group_lock: true) }
context 'and the subgroup "Share with group lock" is enabled' do
- let(:subgroup) { create(:group, parent: root_group, share_with_group_lock: true )}
+ let(:subgroup) { create(:group, parent: root_group, share_with_group_lock: true ) }
it 'the subgroup "Share with group lock" does not change' do
root_group.update!(share_with_group_lock: false)
@@ -1600,7 +1585,7 @@ RSpec.describe Namespace do
end
context 'but the subgroup "Share with group lock" is disabled' do
- let(:subgroup) { create(:group, parent: root_group )}
+ let(:subgroup) { create(:group, parent: root_group ) }
it 'the subgroup "Share with group lock" does not change' do
root_group.update!(share_with_group_lock: false)
@@ -1615,7 +1600,7 @@ RSpec.describe Namespace do
let(:root_group) { create(:group, share_with_group_lock: true) }
context 'when the subgroup "Share with group lock" is enabled' do
- let(:subgroup) { create(:group, share_with_group_lock: true )}
+ let(:subgroup) { create(:group, share_with_group_lock: true ) }
it 'the subgroup "Share with group lock" does not change' do
subgroup.parent = root_group
@@ -1626,7 +1611,7 @@ RSpec.describe Namespace do
end
context 'when the subgroup "Share with group lock" is disabled' do
- let(:subgroup) { create(:group)}
+ let(:subgroup) { create(:group) }
it 'the subgroup "Share with group lock" becomes enabled' do
subgroup.parent = root_group
@@ -1641,7 +1626,7 @@ RSpec.describe Namespace do
let(:root_group) { create(:group) }
context 'when the subgroup "Share with group lock" is enabled' do
- let(:subgroup) { create(:group, share_with_group_lock: true )}
+ let(:subgroup) { create(:group, share_with_group_lock: true ) }
it 'the subgroup "Share with group lock" does not change' do
subgroup.parent = root_group
@@ -1652,7 +1637,7 @@ RSpec.describe Namespace do
end
context 'when the subgroup "Share with group lock" is disabled' do
- let(:subgroup) { create(:group)}
+ let(:subgroup) { create(:group) }
it 'the subgroup "Share with group lock" does not change' do
subgroup.parent = root_group
@@ -2027,7 +2012,7 @@ RSpec.describe Namespace do
end
with_them do
- let(:namespace) { build(:namespace, shared_runners_enabled: shared_runners_enabled, allow_descendants_override_disabled_shared_runners: allow_descendants_override_disabled_shared_runners)}
+ let(:namespace) { build(:namespace, shared_runners_enabled: shared_runners_enabled, allow_descendants_override_disabled_shared_runners: allow_descendants_override_disabled_shared_runners) }
it 'returns the result' do
expect(namespace.shared_runners_setting).to eq(shared_runners_setting)
@@ -2051,7 +2036,7 @@ RSpec.describe Namespace do
end
with_them do
- let(:namespace) { build(:namespace, shared_runners_enabled: shared_runners_enabled, allow_descendants_override_disabled_shared_runners: allow_descendants_override_disabled_shared_runners)}
+ let(:namespace) { build(:namespace, shared_runners_enabled: shared_runners_enabled, allow_descendants_override_disabled_shared_runners: allow_descendants_override_disabled_shared_runners) }
it 'returns the result' do
expect(namespace.shared_runners_setting_higher_than?(other_setting)).to eq(result)
@@ -2282,9 +2267,8 @@ RSpec.describe Namespace do
stub_feature_flags(namespace_storage_limit_bypass_date_check: false)
end
- # Date TBD: https://gitlab.com/gitlab-org/gitlab/-/issues/350632
- it 'returns nil' do
- expect(namespace.storage_enforcement_date).to be(nil)
+ it 'returns correct date' do
+ expect(namespace.storage_enforcement_date).to eql(Date.new(2022, 10, 19))
end
context 'when :storage_banner_bypass_date_check is enabled' do
diff --git a/spec/models/namespaces/project_namespace_spec.rb b/spec/models/namespaces/project_namespace_spec.rb
index c995571c3c9..78403db7fa8 100644
--- a/spec/models/namespaces/project_namespace_spec.rb
+++ b/spec/models/namespaces/project_namespace_spec.rb
@@ -5,6 +5,14 @@ require 'spec_helper'
RSpec.describe Namespaces::ProjectNamespace, type: :model do
describe 'relationships' do
it { is_expected.to have_one(:project).with_foreign_key(:project_namespace_id).inverse_of(:project_namespace) }
+
+ specify do
+ project = create(:project)
+ namespace = project.project_namespace
+ namespace.reload_project
+
+ expect(namespace.project).to eq project
+ end
end
describe 'validations' do
diff --git a/spec/models/note_spec.rb b/spec/models/note_spec.rb
index fc6f7832c2c..ca558848cb0 100644
--- a/spec/models/note_spec.rb
+++ b/spec/models/note_spec.rb
@@ -823,14 +823,14 @@ RSpec.describe Note do
end
context 'with :label action' do
- let!(:metadata) {create(:system_note_metadata, note: note, action: :label)}
+ let!(:metadata) { create(:system_note_metadata, note: note, action: :label) }
it_behaves_like 'system_note_metadata includes note action'
it { expect(note.system_note_with_references?).to be_falsy }
context 'with cross reference label note' do
- let(:label) { create(:label, project: issue.project)}
+ let(:label) { create(:label, project: issue.project) }
let(:note) { create(:system_note, note: "added #{label.to_reference} label", noteable: issue, project: issue.project) }
it { expect(note.system_note_with_references?).to be_truthy }
@@ -838,14 +838,14 @@ RSpec.describe Note do
end
context 'with :milestone action' do
- let!(:metadata) {create(:system_note_metadata, note: note, action: :milestone)}
+ let!(:metadata) { create(:system_note_metadata, note: note, action: :milestone) }
it_behaves_like 'system_note_metadata includes note action'
it { expect(note.system_note_with_references?).to be_falsy }
context 'with cross reference milestone note' do
- let(:milestone) { create(:milestone, project: issue.project)}
+ let(:milestone) { create(:milestone, project: issue.project) }
let(:note) { create(:system_note, note: "added #{milestone.to_reference} milestone", noteable: issue, project: issue.project) }
it { expect(note.system_note_with_references?).to be_truthy }
@@ -1130,7 +1130,7 @@ RSpec.describe Note do
end
describe '#cache_markdown_field' do
- let(:html) { '<p>some html</p>'}
+ let(:html) { '<p>some html</p>' }
before do
allow(Banzai::Renderer).to receive(:cacheless_render_field).and_call_original
@@ -1792,4 +1792,68 @@ RSpec.describe Note do
end
end
end
+
+ shared_examples 'note that replaces task for checklist item in body text' do
+ subject { note.public_send(field_name) }
+
+ context 'when note is not a system note' do
+ let(:note) { create(:note, note: original_note_body) }
+
+ it { is_expected.to eq(unchanged_note_body) }
+ end
+
+ context 'when note is a system note' do
+ context 'when note noteable_type is not Issue' do
+ let(:note) { create(:note, :system, :on_merge_request, note: original_note_body) }
+
+ it { is_expected.to eq(unchanged_note_body) }
+ end
+
+ context 'when note noteable_type is Issue' do
+ let(:note) { create(:note, :system, :on_issue, note: original_note_body) }
+
+ it { is_expected.to eq(expected_text_replacement) }
+ end
+ end
+ end
+
+ describe '#note' do
+ let(:field_name) { :note }
+
+ it_behaves_like 'note that replaces task for checklist item in body text' do
+ let(:original_note_body) { 'marked the task **task 1** as completed' }
+ let(:unchanged_note_body) { original_note_body }
+ let(:expected_text_replacement) { 'marked the checklist item **task 1** as completed' }
+ end
+
+ it_behaves_like 'note that replaces task for checklist item in body text' do
+ let(:original_note_body) { 'marked the task **task 1** as incomplete' }
+ let(:unchanged_note_body) { original_note_body }
+ let(:expected_text_replacement) { 'marked the checklist item **task 1** as incomplete' }
+ end
+ end
+
+ describe '#note_html' do
+ let(:field_name) { :note_html }
+
+ it_behaves_like 'note that replaces task for checklist item in body text' do
+ let(:original_note_body) { 'marked the task **task 1** as completed' }
+ let(:unchanged_note_body) { '<p data-sourcepos="1:1-1:48" dir="auto">marked the task <strong>task 1</strong> as completed</p>' }
+ let(:expected_text_replacement) { '<p data-sourcepos="1:1-1:48" dir="auto">marked the checklist item <strong>task 1</strong> as completed</p>' }
+
+ before do
+ note.update_columns(note_html: unchanged_note_body)
+ end
+ end
+
+ it_behaves_like 'note that replaces task for checklist item in body text' do
+ let(:original_note_body) { 'marked the task **task 1** as incomplete' }
+ let(:unchanged_note_body) { '<p data-sourcepos="1:1-1:48" dir="auto">marked the task <strong>task 1</strong> as incomplete</p>' }
+ let(:expected_text_replacement) { '<p data-sourcepos="1:1-1:48" dir="auto">marked the checklist item <strong>task 1</strong> as incomplete</p>' }
+
+ before do
+ note.update_columns(note_html: unchanged_note_body)
+ end
+ end
+ end
end
diff --git a/spec/models/oauth_access_token_spec.rb b/spec/models/oauth_access_token_spec.rb
index 2b47da1ebe1..544f6643712 100644
--- a/spec/models/oauth_access_token_spec.rb
+++ b/spec/models/oauth_access_token_spec.rb
@@ -10,27 +10,6 @@ RSpec.describe OauthAccessToken do
let(:token) { create(:oauth_access_token, application_id: app_one.id) }
describe 'scopes' do
- describe '.distinct_resource_owner_counts' do
- let(:tokens) { described_class.all }
-
- before do
- token
- create_list(:oauth_access_token, 2, resource_owner: user, application_id: app_two.id)
- end
-
- it 'returns unique owners' do
- expect(tokens.count).to eq(3)
- expect(tokens.distinct_resource_owner_counts([app_one])).to eq({ app_one.id => 1 })
- expect(tokens.distinct_resource_owner_counts([app_two])).to eq({ app_two.id => 1 })
- expect(tokens.distinct_resource_owner_counts([app_three])).to eq({})
- expect(tokens.distinct_resource_owner_counts([app_one, app_two]))
- .to eq({
- app_one.id => 1,
- app_two.id => 1
- })
- end
- end
-
describe '.latest_per_application' do
let!(:app_two_token1) { create(:oauth_access_token, application: app_two) }
let!(:app_two_token2) { create(:oauth_access_token, application: app_two) }
@@ -43,4 +22,51 @@ RSpec.describe OauthAccessToken do
end
end
end
+
+ describe 'Doorkeeper secret storing' do
+ it 'stores the token in hashed format' do
+ expect(token.token).not_to eq(token.plaintext_token)
+ end
+
+ it 'does not allow falling back to plaintext token comparison' do
+ expect(described_class.by_token(token.token)).to be_nil
+ end
+
+ it 'finds a token by plaintext token' do
+ expect(described_class.by_token(token.plaintext_token)).to be_a(OauthAccessToken)
+ end
+
+ context 'when the token is stored in plaintext' do
+ let(:plaintext_token) { Devise.friendly_token(20) }
+
+ before do
+ token.update_column(:token, plaintext_token)
+ end
+
+ it 'falls back to plaintext token comparison' do
+ expect(described_class.by_token(plaintext_token)).to be_a(OauthAccessToken)
+ end
+ end
+
+ context 'when hash_oauth_tokens is disabled' do
+ let(:hashed_token) { create(:oauth_access_token, application_id: app_one.id) }
+
+ before do
+ hashed_token
+ stub_feature_flags(hash_oauth_tokens: false)
+ end
+
+ it 'stores the token in plaintext' do
+ expect(token.token).to eq(token.plaintext_token)
+ end
+
+ it 'finds a token by plaintext token' do
+ expect(described_class.by_token(token.plaintext_token)).to be_a(OauthAccessToken)
+ end
+
+ it 'does not find a token that was previously stored as hashed' do
+ expect(described_class.by_token(hashed_token.plaintext_token)).to be_nil
+ end
+ end
+ end
end
diff --git a/spec/models/onboarding_progress_spec.rb b/spec/models/onboarding_progress_spec.rb
index 80a39404d10..9688dd01c71 100644
--- a/spec/models/onboarding_progress_spec.rb
+++ b/spec/models/onboarding_progress_spec.rb
@@ -12,7 +12,7 @@ RSpec.describe OnboardingProgress do
describe 'validations' do
describe 'namespace_is_root_namespace' do
- subject(:onboarding_progress) { build(:onboarding_progress, namespace: namespace)}
+ subject(:onboarding_progress) { build(:onboarding_progress, namespace: namespace) }
context 'when associated namespace is root' do
it { is_expected.to be_valid }
diff --git a/spec/models/packages/cleanup/policy_spec.rb b/spec/models/packages/cleanup/policy_spec.rb
index a37042520e7..0b6dff472c1 100644
--- a/spec/models/packages/cleanup/policy_spec.rb
+++ b/spec/models/packages/cleanup/policy_spec.rb
@@ -9,6 +9,7 @@ RSpec.describe Packages::Cleanup::Policy, type: :model do
describe 'validations' do
it { is_expected.to validate_presence_of(:project) }
+
it do
is_expected
.to validate_inclusion_of(:keep_n_duplicated_package_files)
diff --git a/spec/models/packages/conan/metadatum_spec.rb b/spec/models/packages/conan/metadatum_spec.rb
index d00723e8e43..92c8b126639 100644
--- a/spec/models/packages/conan/metadatum_spec.rb
+++ b/spec/models/packages/conan/metadatum_spec.rb
@@ -10,7 +10,7 @@ RSpec.describe Packages::Conan::Metadatum, type: :model do
end
describe 'validations' do
- let(:fifty_one_characters) { 'f_a' * 17}
+ let(:fifty_one_characters) { 'f_a' * 17 }
it { is_expected.to validate_presence_of(:package) }
it { is_expected.to validate_presence_of(:package_username) }
diff --git a/spec/models/packages/package_file_spec.rb b/spec/models/packages/package_file_spec.rb
index 82f5b44f38f..9554fc3bb1b 100644
--- a/spec/models/packages/package_file_spec.rb
+++ b/spec/models/packages/package_file_spec.rb
@@ -126,7 +126,7 @@ RSpec.describe Packages::PackageFile, type: :model do
describe '.with_conan_package_reference' do
let_it_be(:non_matching_package_file) { create(:package_file, :nuget) }
let_it_be(:metadatum) { create(:conan_file_metadatum, :package_file) }
- let_it_be(:reference) { metadatum.conan_package_reference}
+ let_it_be(:reference) { metadatum.conan_package_reference }
it 'returns matching packages' do
expect(described_class.with_conan_package_reference(reference))
@@ -150,8 +150,8 @@ RSpec.describe Packages::PackageFile, type: :model do
context 'Debian scopes' do
let_it_be(:debian_changes) { debian_package.package_files.last }
- let_it_be(:debian_deb) { create(:debian_package_file, package: debian_package)}
- let_it_be(:debian_udeb) { create(:debian_package_file, :udeb, package: debian_package)}
+ let_it_be(:debian_deb) { create(:debian_package_file, package: debian_package) }
+ let_it_be(:debian_udeb) { create(:debian_package_file, :udeb, package: debian_package) }
let_it_be(:debian_contrib) do
create(:debian_package_file, package: debian_package).tap do |pf|
diff --git a/spec/models/packages/package_spec.rb b/spec/models/packages/package_spec.rb
index 06f02f021cf..526c57d08b0 100644
--- a/spec/models/packages/package_spec.rb
+++ b/spec/models/packages/package_spec.rb
@@ -131,7 +131,7 @@ RSpec.describe Packages::Package, type: :model do
context 'conan package' do
subject { build_stubbed(:conan_package) }
- let(:fifty_one_characters) {'f_b' * 17}
+ let(:fifty_one_characters) { 'f_b' * 17 }
it { is_expected.to allow_value('foo+bar').for(:name) }
it { is_expected.to allow_value('foo_bar').for(:name) }
@@ -243,7 +243,7 @@ RSpec.describe Packages::Package, type: :model do
context 'conan package' do
subject { build_stubbed(:conan_package) }
- let(:fifty_one_characters) {'1.2' * 17}
+ let(:fifty_one_characters) { '1.2' * 17 }
it { is_expected.to allow_value('1.2').for(:version) }
it { is_expected.to allow_value('1.2.3-beta').for(:version) }
@@ -441,7 +441,7 @@ RSpec.describe Packages::Package, type: :model do
context 'npm package' do
let_it_be(:group) { create(:group) }
let_it_be(:project) { create(:project, namespace: group) }
- let_it_be(:second_project) { create(:project, namespace: group)}
+ let_it_be(:second_project) { create(:project, namespace: group) }
let(:package) { build(:npm_package, project: project, name: name) }
diff --git a/spec/models/personal_access_token_spec.rb b/spec/models/personal_access_token_spec.rb
index 69866d497a1..f3ef347121e 100644
--- a/spec/models/personal_access_token_spec.rb
+++ b/spec/models/personal_access_token_spec.rb
@@ -193,6 +193,20 @@ RSpec.describe PersonalAccessToken do
end
describe 'scopes' do
+ describe '.active' do
+ let_it_be(:revoked_token) { create(:personal_access_token, :revoked) }
+ let_it_be(:not_revoked_false_token) { create(:personal_access_token, revoked: false) }
+ let_it_be(:not_revoked_nil_token) { create(:personal_access_token, revoked: nil) }
+ let_it_be(:expired_token) { create(:personal_access_token, :expired) }
+ let_it_be(:not_expired_token) { create(:personal_access_token) }
+ let_it_be(:never_expires_token) { create(:personal_access_token, expires_at: nil) }
+
+ it 'includes non-revoked and non-expired tokens' do
+ expect(described_class.active)
+ .to match_array([not_revoked_false_token, not_revoked_nil_token, not_expired_token, never_expires_token])
+ end
+ end
+
describe '.expiring_and_not_notified' do
let_it_be(:expired_token) { create(:personal_access_token, expires_at: 2.days.ago) }
let_it_be(:revoked_token) { create(:personal_access_token, revoked: true) }
@@ -251,7 +265,7 @@ RSpec.describe PersonalAccessToken do
describe '.simple_sorts' do
it 'includes overridden keys' do
- expect(described_class.simple_sorts.keys).to include(*%w(expires_at_asc expires_at_desc))
+ expect(described_class.simple_sorts.keys).to include(*%w(expires_at_asc expires_at_desc expires_at_asc_id_desc))
end
end
@@ -270,5 +284,13 @@ RSpec.describe PersonalAccessToken do
expect(described_class.order_expires_at_desc).to match [later_token, earlier_token]
end
end
+
+ describe '.order_expires_at_asc_id_desc' do
+ let_it_be(:earlier_token_2) { create(:personal_access_token, expires_at: 2.days.ago) }
+
+ it 'returns ordered list in combination of expires_at ascending and id descending' do
+ expect(described_class.order_expires_at_asc_id_desc).to eq [earlier_token_2, earlier_token, later_token]
+ end
+ end
end
end
diff --git a/spec/models/postgresql/replication_slot_spec.rb b/spec/models/postgresql/replication_slot_spec.rb
index 63a19541ab5..35c166ab064 100644
--- a/spec/models/postgresql/replication_slot_spec.rb
+++ b/spec/models/postgresql/replication_slot_spec.rb
@@ -116,7 +116,7 @@ RSpec.describe Postgresql::ReplicationSlot do
describe '#slots_retained_bytes' do
it 'returns the number of retained bytes' do
- slot = described_class.slots_retained_bytes.find {|x| x['slot_name'] == 'test_slot' }
+ slot = described_class.slots_retained_bytes.find { |x| x['slot_name'] == 'test_slot' }
expect(slot).not_to be_nil
expect(slot['retained_bytes']).to be_nil
diff --git a/spec/models/preloaders/labels_preloader_spec.rb b/spec/models/preloaders/labels_preloader_spec.rb
index 94de00bb94c..86e64d114c7 100644
--- a/spec/models/preloaders/labels_preloader_spec.rb
+++ b/spec/models/preloaders/labels_preloader_spec.rb
@@ -6,7 +6,7 @@ RSpec.describe Preloaders::LabelsPreloader do
let_it_be(:user) { create(:user) }
shared_examples 'an efficient database query' do
- let(:subscriptions) { labels.each { |l| create(:subscription, subscribable: l, project: l.project, user: user) }}
+ let(:subscriptions) { labels.each { |l| create(:subscription, subscribable: l, project: l.project, user: user) } }
it 'does not make n+1 queries' do
first_label = labels_with_preloaded_data.first
diff --git a/spec/models/preloaders/user_max_access_level_in_groups_preloader_spec.rb b/spec/models/preloaders/user_max_access_level_in_groups_preloader_spec.rb
index 2060e6cd44a..5e2aaa8b456 100644
--- a/spec/models/preloaders/user_max_access_level_in_groups_preloader_spec.rb
+++ b/spec/models/preloaders/user_max_access_level_in_groups_preloader_spec.rb
@@ -29,7 +29,7 @@ RSpec.describe Preloaders::UserMaxAccessLevelInGroupsPreloader do
context 'when the preloader is used', :request_store do
context 'when user has indirect access to groups' do
- let_it_be(:child_maintainer) { create(:group, :private, parent: group1).tap {|g| g.add_maintainer(user)} }
+ let_it_be(:child_maintainer) { create(:group, :private, parent: group1).tap { |g| g.add_maintainer(user) } }
let_it_be(:child_indirect_access) { create(:group, :private, parent: group1) }
let(:groups) { [group1, group2, group3, child_maintainer, child_indirect_access] }
diff --git a/spec/models/project_spec.rb b/spec/models/project_spec.rb
index f46a1646554..98b202299a8 100644
--- a/spec/models/project_spec.rb
+++ b/spec/models/project_spec.rb
@@ -4,7 +4,6 @@ require 'spec_helper'
RSpec.describe Project, factory_default: :keep do
include ProjectForksHelper
- include GitHelpers
include ExternalAuthorizationServiceHelpers
include ReloadHelpers
include StubGitlabCalls
@@ -45,6 +44,7 @@ RSpec.describe Project, factory_default: :keep do
it { is_expected.to have_one(:mattermost_integration) }
it { is_expected.to have_one(:hangouts_chat_integration) }
it { is_expected.to have_one(:unify_circuit_integration) }
+ it { is_expected.to have_one(:pumble_integration) }
it { is_expected.to have_one(:webex_teams_integration) }
it { is_expected.to have_one(:packagist_integration) }
it { is_expected.to have_one(:pushover_integration) }
@@ -148,6 +148,7 @@ RSpec.describe Project, factory_default: :keep do
it { is_expected.to have_many(:build_trace_chunks).through(:builds).dependent(:restrict_with_error) }
it { is_expected.to have_many(:secure_files).class_name('Ci::SecureFile').dependent(:restrict_with_error) }
it { is_expected.to have_one(:build_artifacts_size_refresh).class_name('Projects::BuildArtifactsSizeRefresh') }
+ it { is_expected.to have_many(:project_callouts).class_name('Users::ProjectCallout').with_foreign_key(:project_id) }
# GitLab Pages
it { is_expected.to have_many(:pages_domains) }
@@ -832,6 +833,9 @@ RSpec.describe Project, factory_default: :keep do
it { is_expected.to delegate_method(:last_pipeline).to(:commit).allow_nil }
it { is_expected.to delegate_method(:container_registry_enabled?).to(:project_feature) }
it { is_expected.to delegate_method(:container_registry_access_level).to(:project_feature) }
+ it { is_expected.to delegate_method(:environments_access_level).to(:project_feature) }
+ it { is_expected.to delegate_method(:feature_flags_access_level).to(:project_feature) }
+ it { is_expected.to delegate_method(:releases_access_level).to(:project_feature) }
describe 'read project settings' do
%i(
@@ -873,6 +877,12 @@ RSpec.describe Project, factory_default: :keep do
end
end
+ describe '#ci_allow_fork_pipelines_to_run_in_parent_project?' do
+ it_behaves_like 'a ci_cd_settings predicate method', prefix: 'ci_' do
+ let(:delegated_method) { :allow_fork_pipelines_to_run_in_parent_project? }
+ end
+ end
+
describe '#ci_job_token_scope_enabled?' do
it_behaves_like 'a ci_cd_settings predicate method', prefix: 'ci_' do
let(:delegated_method) { :job_token_scope_enabled? }
@@ -5741,16 +5751,18 @@ RSpec.describe Project, factory_default: :keep do
describe '#set_full_path' do
let_it_be(:project) { create(:project, :repository) }
+ let(:repository) { project.repository.raw }
+
it 'writes full path in .git/config when key is missing' do
project.set_full_path
- expect(rugged_config['gitlab.fullpath']).to eq project.full_path
+ expect(repository.full_path).to eq project.full_path
end
it 'updates full path in .git/config when key is present' do
project.set_full_path(gl_full_path: 'old/path')
- expect { project.set_full_path }.to change { rugged_config['gitlab.fullpath'] }.from('old/path').to(project.full_path)
+ expect { project.set_full_path }.to change { repository.full_path }.from('old/path').to(project.full_path)
end
it 'does not raise an error with an empty repository' do
@@ -5880,7 +5892,7 @@ RSpec.describe Project, factory_default: :keep do
end
describe '#has_active_hooks?' do
- let_it_be(:project) { create(:project) }
+ let_it_be_with_refind(:project) { create(:project) }
it { expect(project.has_active_hooks?).to be_falsey }
@@ -7471,7 +7483,7 @@ RSpec.describe Project, factory_default: :keep do
end
with_them do
- it { is_expected.to eq expected_result}
+ it { is_expected.to eq expected_result }
end
end
@@ -7488,7 +7500,7 @@ RSpec.describe Project, factory_default: :keep do
end
with_them do
- it { is_expected.to eq expected_result}
+ it { is_expected.to eq expected_result }
end
context 'for a different package type' do
@@ -7511,7 +7523,7 @@ RSpec.describe Project, factory_default: :keep do
end
with_them do
- it { is_expected.to eq expected_result}
+ it { is_expected.to eq expected_result }
end
end
end
@@ -8240,58 +8252,52 @@ RSpec.describe Project, factory_default: :keep do
end
describe '#work_items_feature_flag_enabled?' do
- shared_examples 'project checking work_items feature flag' do
- context 'when work_items FF is disabled globally' do
- before do
- stub_feature_flags(work_items: false)
- end
+ let_it_be(:group_project) { create(:project, :in_subgroup) }
- it { is_expected.to be_falsey }
+ it_behaves_like 'checks parent group feature flag' do
+ let(:feature_flag_method) { :work_items_feature_flag_enabled? }
+ let(:feature_flag) { :work_items }
+ let(:subject_project) { group_project }
+ end
+
+ context 'when feature flag is enabled for the project' do
+ subject { subject_project.work_items_feature_flag_enabled? }
+
+ before do
+ stub_feature_flags(work_items: subject_project)
end
- context 'when work_items FF is enabled for the project' do
- before do
- stub_feature_flags(work_items: project)
- end
+ context 'when project belongs to a group' do
+ let(:subject_project) { group_project }
it { is_expected.to be_truthy }
end
- context 'when work_items FF is enabled globally' do
+ context 'when project does not belong to a group' do
+ let(:subject_project) { create(:project, namespace: create(:namespace)) }
+
it { is_expected.to be_truthy }
end
end
+ end
- subject { project.work_items_feature_flag_enabled? }
-
- context 'when a project does not belong to a group' do
- let_it_be(:project) { create(:project, namespace: namespace) }
+ describe '#work_items_mvc_2_feature_flag_enabled?' do
+ let_it_be(:group_project) { create(:project, :in_subgroup) }
- it_behaves_like 'project checking work_items feature flag'
+ it_behaves_like 'checks parent group feature flag' do
+ let(:feature_flag_method) { :work_items_mvc_2_feature_flag_enabled? }
+ let(:feature_flag) { :work_items_mvc_2 }
+ let(:subject_project) { group_project }
end
+ end
- context 'when project belongs to a group' do
- let_it_be(:root_group) { create(:group) }
- let_it_be(:group) { create(:group, parent: root_group) }
- let_it_be(:project) { create(:project, group: group) }
-
- it_behaves_like 'project checking work_items feature flag'
-
- context 'when work_items FF is enabled for the root group' do
- before do
- stub_feature_flags(work_items: root_group)
- end
-
- it { is_expected.to be_truthy }
- end
+ describe '#work_items_create_from_markdown_feature_flag_enabled?' do
+ let_it_be(:group_project) { create(:project, :in_subgroup) }
- context 'when work_items FF is enabled for the group' do
- before do
- stub_feature_flags(work_items: group)
- end
-
- it { is_expected.to be_truthy }
- end
+ it_behaves_like 'checks parent group feature flag' do
+ let(:feature_flag_method) { :work_items_create_from_markdown_feature_flag_enabled? }
+ let(:feature_flag) { :work_items_create_from_markdown }
+ let(:subject_project) { group_project }
end
end
@@ -8428,6 +8434,23 @@ RSpec.describe Project, factory_default: :keep do
end
end
+ describe '#destroy_deployment_by_id' do
+ let(:project) { create(:project, :repository) }
+
+ let!(:deployment) { create(:deployment, :created, project: project) }
+ let!(:old_deployment) { create(:deployment, :created, project: project, finished_at: 1.year.ago) }
+
+ it 'will call fast_destroy_all on a specific deployment by id' do
+ expect(Deployment).to receive(:fast_destroy_all).and_call_original
+
+ expect do
+ project.destroy_deployment_by_id(project.deployments.first.id)
+ end.to change { project.deployments.count }.by(-1)
+
+ expect(project.deployments).to match_array([old_deployment])
+ end
+ end
+
private
def finish_job(export_job)
@@ -8435,10 +8458,6 @@ RSpec.describe Project, factory_default: :keep do
export_job.finish
end
- def rugged_config
- rugged_repo(project.repository).config
- end
-
def create_pipeline(project, status = 'success')
create(:ci_pipeline, project: project,
sha: project.commit.sha,
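Note on the project_spec.rb hunks above: the new #destroy_deployment_by_id example expects the model to route through Deployment.fast_destroy_all and remove exactly one record. A plausible sketch, assuming the method simply narrows the deployments relation to the given id (the body is an assumption, not part of this diff):

    # Sketch only; assumed shape of Project#destroy_deployment_by_id.
    def destroy_deployment_by_id(deployment_id)
      deployments.where(id: deployment_id).fast_destroy_all
    end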
diff --git a/spec/models/project_statistics_spec.rb b/spec/models/project_statistics_spec.rb
index 53175a2f840..f4edc68457b 100644
--- a/spec/models/project_statistics_spec.rb
+++ b/spec/models/project_statistics_spec.rb
@@ -26,31 +26,20 @@ RSpec.describe ProjectStatistics do
end
describe 'statistics columns' do
- it "support values up to 8 exabytes" do
- statistics.update!(
- commit_count: 8.exabytes - 1,
- repository_size: 2.exabytes,
- wiki_size: 1.exabytes,
- lfs_objects_size: 2.exabytes,
- build_artifacts_size: 1.exabyte,
- snippets_size: 1.exabyte,
- pipeline_artifacts_size: 512.petabytes - 1,
- uploads_size: 512.petabytes,
- container_registry_size: 12.petabytes
- )
-
- statistics.reload
-
- expect(statistics.commit_count).to eq(8.exabytes - 1)
- expect(statistics.repository_size).to eq(2.exabytes)
- expect(statistics.wiki_size).to eq(1.exabytes)
- expect(statistics.lfs_objects_size).to eq(2.exabytes)
- expect(statistics.build_artifacts_size).to eq(1.exabyte)
- expect(statistics.storage_size).to eq(8.exabytes - 1)
- expect(statistics.snippets_size).to eq(1.exabyte)
- expect(statistics.pipeline_artifacts_size).to eq(512.petabytes - 1)
- expect(statistics.uploads_size).to eq(512.petabytes)
- expect(statistics.container_registry_size).to eq(12.petabytes)
+ it "supports bigint values" do
+ expect do
+ statistics.update!(
+ commit_count: 3.gigabytes,
+ repository_size: 3.gigabytes,
+ wiki_size: 3.gigabytes,
+ lfs_objects_size: 3.gigabytes,
+ build_artifacts_size: 3.gigabytes,
+ snippets_size: 3.gigabytes,
+ pipeline_artifacts_size: 3.gigabytes,
+ uploads_size: 3.gigabytes,
+ container_registry_size: 3.gigabytes
+ )
+ end.not_to raise_error
end
end
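Note on the statistics hunk above: the rewritten example only asserts that the bigint columns accept the values without raising. As a sanity check of the bound, 3.gigabytes sits far below the signed 64-bit maximum:

    # Arithmetic behind the expectation above (ActiveSupport numeric helpers).
    3.gigabytes              # => 3221225472
    2**63 - 1                # => 9223372036854775807 (PostgreSQL bigint max)
    3.gigabytes < 2**63 - 1  # => true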
diff --git a/spec/models/projects/import_export/relation_export_spec.rb b/spec/models/projects/import_export/relation_export_spec.rb
index c74ca82e161..8643fbc7b46 100644
--- a/spec/models/projects/import_export/relation_export_spec.rb
+++ b/spec/models/projects/import_export/relation_export_spec.rb
@@ -20,4 +20,36 @@ RSpec.describe Projects::ImportExport::RelationExport, type: :model do
it { is_expected.to validate_length_of(:jid).is_at_most(255) }
it { is_expected.to validate_length_of(:export_error).is_at_most(300) }
end
+
+ describe '.by_relation' do
+ it 'returns export relations filtered by relation name' do
+ project_relation_export_1 = create(:project_relation_export, relation: 'labels')
+ project_relation_export_2 = create(:project_relation_export, relation: 'labels')
+ create(:project_relation_export, relation: 'uploads')
+
+ relations = described_class.by_relation('labels').to_a
+
+ expect(relations).to match_array([project_relation_export_1, project_relation_export_2])
+ end
+ end
+
+ describe '.relation_names_list' do
+ it 'includes extra relations list' do
+ expect(described_class.relation_names_list).to include(
+ 'design_repository', 'lfs_objects', 'repository', 'snippets_repository', 'uploads', 'wiki_repository'
+ )
+ end
+
+ it 'includes root tree relation name project' do
+ expect(described_class.relation_names_list).to include('project')
+ end
+
+ it 'includes project tree top level relation nodes' do
+ expect(described_class.relation_names_list).to include('milestones', 'issues', 'snippets', 'releases')
+ end
+
+ it 'includes project tree nested relation nodes' do
+ expect(described_class.relation_names_list).not_to include('events', 'notes')
+ end
+ end
end
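Note on the relation_export hunk above: .by_relation filters export rows by their relation column. A minimal sketch, assuming a one-line scope (the body is an assumption):

    # Sketch only; assumed implementation of the scope exercised above.
    scope :by_relation, ->(relation) { where(relation: relation) }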
diff --git a/spec/models/projects/topic_spec.rb b/spec/models/projects/topic_spec.rb
index fc9d9bef437..f9659ef352c 100644
--- a/spec/models/projects/topic_spec.rb
+++ b/spec/models/projects/topic_spec.rb
@@ -30,6 +30,17 @@ RSpec.describe Projects::Topic do
end
describe 'scopes' do
+ describe 'without_assigned_projects' do
+ let_it_be(:unassigned_topic) { create(:topic, name: 'unassigned topic') }
+ let_it_be(:project) { create(:project, :public, topic_list: 'topic') }
+
+ it 'returns topics without assigned projects' do
+ topics = described_class.without_assigned_projects
+
+ expect(topics).to contain_exactly(unassigned_topic)
+ end
+ end
+
describe 'order_by_non_private_projects_count' do
let!(:topic1) { create(:topic, name: 'topicB') }
let!(:topic2) { create(:topic, name: 'topicC') }
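Note on the topic hunk above: without_assigned_projects returns only topics that no project uses. A sketch of one way to express that, assuming the model keeps a total_projects_count counter (an assumption; a join-based where.missing(:project_topics) would be an equivalent formulation):

    # Sketch only; assumed implementation of the scope exercised above.
    scope :without_assigned_projects, -> { where(total_projects_count: 0) }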
diff --git a/spec/models/protected_branch_spec.rb b/spec/models/protected_branch_spec.rb
index a3fc09b31fb..3936e7127b8 100644
--- a/spec/models/protected_branch_spec.rb
+++ b/spec/models/protected_branch_spec.rb
@@ -167,36 +167,130 @@ RSpec.describe ProtectedBranch do
expect(described_class.protected?(project, nil)).to eq(false)
end
- context 'with caching', :use_clean_rails_memory_store_caching do
+ context 'with caching', :use_clean_rails_redis_caching do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:protected_branch) { create(:protected_branch, project: project, name: "“jawn”") }
+ let(:feature_flag) { true }
+ let(:dry_run) { true }
+
+ shared_examples_for 'hash based cache implementation' do
+ it 'calls only hash based cache implementation' do
+ expect_next_instance_of(ProtectedBranches::CacheService) do |instance|
+ expect(instance).to receive(:fetch).with('missing-branch', anything).and_call_original
+ end
+
+ expect(Rails.cache).not_to receive(:fetch)
+
+ described_class.protected?(project, 'missing-branch', dry_run: dry_run)
+ end
+ end
+
before do
- allow(described_class).to receive(:matching).with(protected_branch.name, protected_refs: anything).once.and_call_original
+ stub_feature_flags(hash_based_cache_for_protected_branches: feature_flag)
+ allow(described_class).to receive(:matching).and_call_original
# the original call works and warms the cache
- described_class.protected?(project, protected_branch.name)
+ described_class.protected?(project, protected_branch.name, dry_run: dry_run)
end
- it 'correctly invalidates a cache' do
- expect(described_class).to receive(:matching).with(protected_branch.name, protected_refs: anything).once.and_call_original
+ context 'Dry-run: true' do
+ it 'recalculates a fresh value every time in order to check the cache is not returning stale data' do
+ expect(described_class).to receive(:matching).with(protected_branch.name, protected_refs: anything).twice
+
+ 2.times { described_class.protected?(project, protected_branch.name) }
+ end
- create(:protected_branch, project: project, name: "bar")
- # the cache is invalidated because the project has been "updated"
- expect(described_class.protected?(project, protected_branch.name)).to eq(true)
+ it_behaves_like 'hash based cache implementation'
end
- it 'correctly uses the cached version' do
- expect(described_class).not_to receive(:matching)
- expect(described_class.protected?(project, protected_branch.name)).to eq(true)
+ context 'Dry-run: false' do
+ let(:dry_run) { false }
+
+ it 'correctly invalidates a cache' do
+ expect(described_class).to receive(:matching).with(protected_branch.name, protected_refs: anything).exactly(3).times.and_call_original
+
+ create_params = { name: 'bar', merge_access_levels_attributes: [{ access_level: Gitlab::Access::DEVELOPER }] }
+ branch = ProtectedBranches::CreateService.new(project, project.owner, create_params).execute
+ expect(described_class.protected?(project, protected_branch.name, dry_run: dry_run)).to eq(true)
+
+ ProtectedBranches::UpdateService.new(project, project.owner, name: 'ber').execute(branch)
+ expect(described_class.protected?(project, protected_branch.name, dry_run: dry_run)).to eq(true)
+
+ ProtectedBranches::DestroyService.new(project, project.owner).execute(branch)
+ expect(described_class.protected?(project, protected_branch.name, dry_run: dry_run)).to eq(true)
+ end
+
+ it_behaves_like 'hash based cache implementation'
+
+ context 'when project is updated' do
+ it 'does not invalidate a cache' do
+ expect(described_class).not_to receive(:matching).with(protected_branch.name, protected_refs: anything)
+
+ project.touch
+
+ described_class.protected?(project, protected_branch.name, dry_run: dry_run)
+ end
+ end
+
+ context 'when other project protected branch is updated' do
+ it 'does not invalidate the current project cache' do
+ expect(described_class).not_to receive(:matching).with(protected_branch.name, protected_refs: anything)
+
+ another_project = create(:project)
+ ProtectedBranches::CreateService.new(another_project, another_project.owner, name: 'bar').execute
+
+ described_class.protected?(project, protected_branch.name, dry_run: dry_run)
+ end
+ end
+
+ it 'correctly uses the cached version' do
+ expect(described_class).not_to receive(:matching)
+
+ expect(described_class.protected?(project, protected_branch.name, dry_run: dry_run)).to eq(true)
+ end
end
- it 'sets expires_in for a cache key' do
- cache_key = described_class.protected_ref_cache_key(project, protected_branch.name)
+ context 'when feature flag hash_based_cache_for_protected_branches is off' do
+ let(:feature_flag) { false }
- expect(Rails.cache).to receive(:fetch).with(cache_key, expires_in: 1.hour)
+ it 'does not call hash based cache implementation' do
+ expect(ProtectedBranches::CacheService).not_to receive(:new)
+ expect(Rails.cache).to receive(:fetch).and_call_original
+
+ described_class.protected?(project, 'missing-branch')
+ end
+
+ it 'correctly invalidates a cache' do
+ expect(described_class).to receive(:matching).with(protected_branch.name, protected_refs: anything).once.and_call_original
+
+ create(:protected_branch, project: project, name: "bar")
+ # the cache is invalidated because the project has been "updated"
+ expect(described_class.protected?(project, protected_branch.name)).to eq(true)
+ end
- described_class.protected?(project, protected_branch.name)
+ it 'sets expires_in of 1 hour for the Rails cache key' do
+ cache_key = described_class.protected_ref_cache_key(project, protected_branch.name)
+
+ expect(Rails.cache).to receive(:fetch).with(cache_key, expires_in: 1.hour)
+
+ described_class.protected?(project, protected_branch.name)
+ end
+
+ context 'when project is updated' do
+ it 'invalidates Rails cache' do
+ expect(described_class).to receive(:matching).with(protected_branch.name, protected_refs: anything).once.and_call_original
+
+ project.touch
+
+ described_class.protected?(project, protected_branch.name)
+ end
+ end
+
+ it 'correctly uses the cached version' do
+ expect(described_class).not_to receive(:matching)
+ expect(described_class.protected?(project, protected_branch.name)).to eq(true)
+ end
end
end
end
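Note on the protected_branch caching hunks above: the specs describe two code paths, a hash-based cache behind the hash_based_cache_for_protected_branches feature flag (ProtectedBranches::CacheService#fetch, with a dry_run mode that recomputes the value to verify the cache), and the legacy Rails.cache.fetch path with a one-hour TTL. A rough sketch of how .protected? might branch between them; the method bodies and the protected_refs helper are assumptions, only the names stubbed in the specs come from the diff:

    # Sketch only; assumed structure of ProtectedBranch.protected?.
    def self.protected?(project, ref_name, dry_run: true)
      return false if ref_name.blank?

      compute = -> { matching(ref_name, protected_refs: protected_refs(project)).present? }

      if Feature.enabled?(:hash_based_cache_for_protected_branches, project)
        ProtectedBranches::CacheService.new(project).fetch(ref_name, dry_run: dry_run) { compute.call }
      else
        Rails.cache.fetch(protected_ref_cache_key(project, ref_name), expires_in: 1.hour) { compute.call }
      end
    end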
diff --git a/spec/models/release_highlight_spec.rb b/spec/models/release_highlight_spec.rb
index 14a43df4229..3555dfba769 100644
--- a/spec/models/release_highlight_spec.rb
+++ b/spec/models/release_highlight_spec.rb
@@ -28,7 +28,7 @@ RSpec.describe ReleaseHighlight, :clean_gitlab_redis_cache do
let(:page) { 3 }
it 'responds with paginated results' do
- expect(subject[:items].first['title']).to eq('bright')
+ expect(subject[:items].first['name']).to eq('bright')
expect(subject[:next_page]).to eq(4)
end
end
@@ -37,7 +37,7 @@ RSpec.describe ReleaseHighlight, :clean_gitlab_redis_cache do
let(:page) { 4 }
it 'responds with paginated results and no next_page' do
- expect(subject[:items].first['title']).to eq("It's gonna be a bright")
+ expect(subject[:items].first['name']).to eq("It's gonna be a bright")
expect(subject[:next_page]).to eq(nil)
end
end
@@ -63,12 +63,12 @@ RSpec.describe ReleaseHighlight, :clean_gitlab_redis_cache do
it 'returns platform specific items' do
expect(subject[:items].count).to eq(1)
- expect(subject[:items].first['title']).to eq("bright and sunshinin' day")
+ expect(subject[:items].first['name']).to eq("bright and sunshinin' day")
expect(subject[:next_page]).to eq(2)
end
- it 'parses the body as markdown and returns html, and links are target="_blank"' do
- expect(subject[:items].first['body']).to match('<p data-sourcepos="1:1-1:62" dir="auto">bright and sunshinin\' <a href="https://en.wikipedia.org/wiki/Day" rel="nofollow noreferrer noopener" target="_blank">day</a></p>')
+ it 'parses the description as markdown and returns html, and links are target="_blank"' do
+ expect(subject[:items].first['description']).to match('<p data-sourcepos="1:1-1:62" dir="auto">bright and sunshinin\' <a href="https://en.wikipedia.org/wiki/Day" rel="nofollow noreferrer noopener" target="_blank">day</a></p>')
end
it 'logs an error if theres an error parsing markdown for an item, and skips it' do
@@ -83,7 +83,7 @@ RSpec.describe ReleaseHighlight, :clean_gitlab_redis_cache do
it 'responds with a different set of data' do
expect(subject[:items].count).to eq(1)
- expect(subject[:items].first['title']).to eq("I think I can make it now the pain is gone")
+ expect(subject[:items].first['name']).to eq("I think I can make it now the pain is gone")
end
end
@@ -171,7 +171,7 @@ RSpec.describe ReleaseHighlight, :clean_gitlab_redis_cache do
items = described_class.load_items(page: 2)
expect(items.count).to eq(1)
- expect(items.first['title']).to eq("View epics on a board")
+ expect(items.first['name']).to eq("View epics on a board")
end
end
end
diff --git a/spec/models/release_spec.rb b/spec/models/release_spec.rb
index 83d7596ff51..180a76ff593 100644
--- a/spec/models/release_spec.rb
+++ b/spec/models/release_spec.rb
@@ -233,6 +233,6 @@ RSpec.describe Release do
let_it_be(:milestone_2) { create(:milestone, project: project, title: 'Milestone 2') }
let_it_be(:release) { create(:release, project: project, milestones: [milestone_1, milestone_2]) }
- it { expect(release.milestone_titles).to eq("#{milestone_1.title}, #{milestone_2.title}")}
+ it { expect(release.milestone_titles).to eq("#{milestone_1.title}, #{milestone_2.title}") }
end
end
diff --git a/spec/models/releases/link_spec.rb b/spec/models/releases/link_spec.rb
index 74ef38f482b..4910de61c22 100644
--- a/spec/models/releases/link_spec.rb
+++ b/spec/models/releases/link_spec.rb
@@ -127,7 +127,7 @@ RSpec.describe Releases::Link do
describe 'FILEPATH_REGEX with table' do
using RSpec::Parameterized::TableSyntax
- let(:link) { build(:release_link)}
+ let(:link) { build(:release_link) }
where(:reason, :filepath, :result) do
'cannot contain `//`' | '/https//www.example.com' | be_invalid
diff --git a/spec/models/remote_mirror_spec.rb b/spec/models/remote_mirror_spec.rb
index 51351c9fdd1..429ad550626 100644
--- a/spec/models/remote_mirror_spec.rb
+++ b/spec/models/remote_mirror_spec.rb
@@ -3,8 +3,6 @@
require 'spec_helper'
RSpec.describe RemoteMirror, :mailer do
- include GitHelpers
-
before do
stub_feature_flags(remote_mirror_no_delay: false)
end
@@ -96,16 +94,6 @@ RSpec.describe RemoteMirror, :mailer do
expect(mirror.url).to eq('http://foo:bar@test.com')
expect(mirror.credentials).to eq({ user: 'foo', password: 'bar' })
end
-
- it 'does not update the repository config if credentials changed' do
- mirror = create_mirror(url: 'http://foo:bar@test.com')
- repo = mirror.project.repository
- old_config = rugged_repo(repo).config
-
- mirror.update_attribute(:url, 'http://foo:baz@test.com')
-
- expect(rugged_repo(repo).config.to_hash).to eq(old_config.to_hash)
- end
end
end
@@ -231,7 +219,7 @@ RSpec.describe RemoteMirror, :mailer do
end
describe '#hard_retry!' do
- let(:remote_mirror) { create(:remote_mirror).tap {|mirror| mirror.update_column(:url, 'invalid') } }
+ let(:remote_mirror) { create(:remote_mirror).tap { |mirror| mirror.update_column(:url, 'invalid') } }
it 'transitions an invalid mirror to the to_retry state' do
remote_mirror.hard_retry!('Invalid')
@@ -242,7 +230,7 @@ RSpec.describe RemoteMirror, :mailer do
end
describe '#hard_fail!' do
- let(:remote_mirror) { create(:remote_mirror).tap {|mirror| mirror.update_column(:url, 'invalid') } }
+ let(:remote_mirror) { create(:remote_mirror).tap { |mirror| mirror.update_column(:url, 'invalid') } }
it 'transitions an invalid mirror to the failed state' do
remote_mirror.hard_fail!('Invalid')
diff --git a/spec/models/repository_spec.rb b/spec/models/repository_spec.rb
index b3fbe75a526..530b03714b4 100644
--- a/spec/models/repository_spec.rb
+++ b/spec/models/repository_spec.rb
@@ -1469,6 +1469,20 @@ RSpec.describe Repository do
expect(repository.find_branch(branch_name)).to be_nil
end
end
+
+ it 'expires branches cache' do
+ expect(repository).to receive(:expire_branches_cache)
+
+ subject
+ end
+
+ context 'when expire_cache: false' do
+ it 'does not expire branches cache' do
+ expect(repository).not_to receive(:expire_branches_cache)
+
+ repository.add_branch(user, branch_name, target, expire_cache: false)
+ end
+ end
end
shared_examples 'asymmetric cached method' do |method|
@@ -2263,10 +2277,34 @@ RSpec.describe Repository do
.with(%i(branch_names merged_branch_names branch_count has_visible_content? has_ambiguous_refs?))
.and_call_original
+ expect_next_instance_of(ProtectedBranches::CacheService) do |cache_service|
+ expect(cache_service).to receive(:refresh)
+ end
+
repository.expire_branches_cache
end
end
+ describe '#expire_protected_branches_cache' do
+ it 'expires the cache' do
+ expect_next_instance_of(ProtectedBranches::CacheService) do |cache_service|
+ expect(cache_service).to receive(:refresh)
+ end
+
+ repository.expire_protected_branches_cache
+ end
+
+ context 'when repository does not have a project' do
+ let!(:snippet) { create(:personal_snippet, :repository) }
+
+ it 'does not expire the cache' do
+ expect(ProtectedBranches::CacheService).not_to receive(:new)
+
+ snippet.repository.expire_protected_branches_cache
+ end
+ end
+ end
+
describe '#expire_tags_cache' do
it 'expires the cache' do
expect(repository).to receive(:expire_method_caches)
@@ -3123,7 +3161,7 @@ RSpec.describe Repository do
it 'after_create is not executed' do
expect(repository).not_to receive(:after_create)
- expect {repository.create_from_bundle(valid_bundle_path)}.to raise_error(::Gitlab::Git::BundleFile::InvalidBundleError)
+ expect { repository.create_from_bundle(valid_bundle_path) }.to raise_error(::Gitlab::Git::BundleFile::InvalidBundleError)
end
end
end
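Note on the repository hunks above: expire_branches_cache now also refreshes ProtectedBranches::CacheService, and expire_protected_branches_cache is expected to be a no-op for repositories without a project (personal snippets). A sketch consistent with those expectations; the guard and body are assumptions:

    # Sketch only; assumed shape of Repository#expire_protected_branches_cache.
    def expire_protected_branches_cache
      return unless project

      ProtectedBranches::CacheService.new(project).refresh
    end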
diff --git a/spec/models/snippet_input_action_collection_spec.rb b/spec/models/snippet_input_action_collection_spec.rb
index 3ec206bd031..269a9e1c787 100644
--- a/spec/models/snippet_input_action_collection_spec.rb
+++ b/spec/models/snippet_input_action_collection_spec.rb
@@ -12,7 +12,7 @@ RSpec.describe SnippetInputActionCollection do
it { is_expected.to delegate_method(:[]).to(:actions) }
describe '#to_commit_actions' do
- subject { described_class.new(data).to_commit_actions}
+ subject { described_class.new(data).to_commit_actions }
it 'translates all actions to commit actions' do
transformed_action = action.merge(action: action_name.to_sym)
@@ -22,14 +22,14 @@ RSpec.describe SnippetInputActionCollection do
end
describe '#valid?' do
- subject { described_class.new(data).valid?}
+ subject { described_class.new(data).valid? }
it 'returns true' do
expect(subject).to be true
end
context 'when any of the actions is invalid' do
- let(:data) { [action, { action: 'foo' }, action]}
+ let(:data) { [action, { action: 'foo' }, action] }
it 'returns false' do
expect(subject).to be false
diff --git a/spec/models/snippet_spec.rb b/spec/models/snippet_spec.rb
index a54edc8510e..38bd189f6f4 100644
--- a/spec/models/snippet_spec.rb
+++ b/spec/models/snippet_spec.rb
@@ -571,8 +571,8 @@ RSpec.describe Snippet do
context 'when some blobs are not retrievable from repository' do
let(:snippet) { create(:snippet, :repository) }
let(:container) { double(:container) }
- let(:retrievable_filename) { 'retrievable_file'}
- let(:unretrievable_filename) { 'unretrievable_file'}
+ let(:retrievable_filename) { 'retrievable_file' }
+ let(:unretrievable_filename) { 'unretrievable_file' }
before do
allow(snippet).to receive(:list_files).and_return([retrievable_filename, unretrievable_filename])
diff --git a/spec/models/u2f_registration_spec.rb b/spec/models/u2f_registration_spec.rb
index 6bb9ccfcf35..1fab3882c2a 100644
--- a/spec/models/u2f_registration_spec.rb
+++ b/spec/models/u2f_registration_spec.rb
@@ -6,23 +6,68 @@ RSpec.describe U2fRegistration do
let_it_be(:user) { create(:user) }
let(:u2f_registration_name) { 'u2f_device' }
+ let(:app_id) { FFaker::BaconIpsum.characters(5) }
+ let(:device) { U2F::FakeU2F.new(app_id) }
- let(:u2f_registration) do
- device = U2F::FakeU2F.new(FFaker::BaconIpsum.characters(5))
- create(:u2f_registration, name: u2f_registration_name,
- user: user,
- certificate: Base64.strict_encode64(device.cert_raw),
- key_handle: U2F.urlsafe_encode64(device.key_handle_raw),
- public_key: Base64.strict_encode64(device.origin_public_key_raw))
+ describe '.authenticate' do
+ context 'when registration is found' do
+ it 'returns true' do
+ create_u2f_registration
+ device_challenge = U2F.urlsafe_encode64(SecureRandom.random_bytes(32))
+ sign_response_json = device.sign_response(device_challenge)
+
+ response = U2fRegistration.authenticate(
+ user,
+ app_id,
+ sign_response_json,
+ device_challenge
+ )
+
+ expect(response).to eq true
+ end
+ end
+
+ context 'when registration not found' do
+ it 'returns nil' do
+ device_challenge = U2F.urlsafe_encode64(SecureRandom.random_bytes(32))
+ sign_response_json = device.sign_response(device_challenge)
+
+ # data is valid but user does not have any u2f_registrations
+ response = U2fRegistration.authenticate(
+ user,
+ app_id,
+ sign_response_json,
+ device_challenge
+ )
+
+ expect(response).to eq nil
+ end
+ end
+
+ context 'when args passed in are invalid' do
+ it 'returns false' do
+ some_app_id = 123
+ invalid_json = 'invalid JSON'
+ challenges = 'whatever'
+
+ response = U2fRegistration.authenticate(
+ user,
+ some_app_id,
+ invalid_json,
+ challenges
+ )
+
+ expect(response).to eq false
+ end
+ end
end
describe 'callbacks' do
- describe '#create_webauthn_registration' do
+ describe 'after create' do
shared_examples_for 'creates webauthn registration' do
it 'creates webauthn registration' do
- created_record = u2f_registration
-
- webauthn_registration = WebauthnRegistration.where(u2f_registration_id: created_record.id)
+ u2f_registration = create_u2f_registration
+ webauthn_registration = WebauthnRegistration.where(u2f_registration_id: u2f_registration.id)
expect(webauthn_registration).to exist
end
end
@@ -52,8 +97,45 @@ RSpec.describe U2fRegistration do
receive(:track_exception).with(kind_of(StandardError),
u2f_registration_id: 123))
- u2f_registration
+ create_u2f_registration
end
end
+
+ describe 'after update' do
+ context 'when counter is updated' do
+ it 'updates the webauthn registration counter to be the same value' do
+ u2f_registration = create_u2f_registration
+ new_counter = u2f_registration.counter + 1
+ webauthn_registration = WebauthnRegistration.find_by(u2f_registration_id: u2f_registration.id)
+
+ u2f_registration.update!(counter: new_counter)
+
+ expect(u2f_registration.reload.counter).to eq(new_counter)
+ expect(webauthn_registration.reload.counter).to eq(new_counter)
+ end
+ end
+
+ context 'when sign count of registration is not updated' do
+ it 'does not update the counter' do
+ u2f_registration = create_u2f_registration
+ webauthn_registration = WebauthnRegistration.find_by(u2f_registration_id: u2f_registration.id)
+
+ expect do
+ u2f_registration.update!(name: 'a new name')
+ end.not_to change { webauthn_registration.counter }
+ end
+ end
+ end
+ end
+
+ def create_u2f_registration
+ create(
+ :u2f_registration,
+ name: u2f_registration_name,
+ user: user,
+ certificate: Base64.strict_encode64(device.cert_raw),
+ key_handle: U2F.urlsafe_encode64(device.key_handle_raw),
+ public_key: Base64.strict_encode64(device.origin_public_key_raw)
+ )
end
end
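Note on the u2f_registration hunks above: .authenticate is expected to return true when a stored registration verifies the signed challenge, nil when the user has no matching registration, and false when the arguments cannot even be parsed. A sketch using the u2f gem; the lookup and rescue list are assumptions:

    # Sketch only; assumed shape of U2fRegistration.authenticate.
    def self.authenticate(user, app_id, json_response, challenge)
      response = U2F::SignResponse.load_from_json(json_response)
      registration = user.u2f_registrations.find_by(key_handle: response.key_handle)
      return unless registration # nil when no registration matches

      U2F::U2F.new(app_id).authenticate!(
        challenge, response, Base64.decode64(registration.public_key), registration.counter
      )
      true
    rescue JSON::ParserError, NoMethodError, ArgumentError, U2F::Error
      false
    end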
diff --git a/spec/models/user_spec.rb b/spec/models/user_spec.rb
index ae6ebdbc6fd..69cd51137b5 100644
--- a/spec/models/user_spec.rb
+++ b/spec/models/user_spec.rb
@@ -137,6 +137,7 @@ RSpec.describe User do
it { is_expected.to have_many(:callouts).class_name('Users::Callout') }
it { is_expected.to have_many(:group_callouts).class_name('Users::GroupCallout') }
it { is_expected.to have_many(:namespace_callouts).class_name('Users::NamespaceCallout') }
+ it { is_expected.to have_many(:project_callouts).class_name('Users::ProjectCallout') }
describe '#user_detail' do
it 'does not persist `user_detail` by default' do
@@ -1082,20 +1083,6 @@ RSpec.describe User do
end
end
- describe '.by_id_and_login' do
- let_it_be(:user) { create(:user) }
-
- it 'finds a user regardless of case' do
- expect(described_class.by_id_and_login(user.id, user.username.upcase))
- .to contain_exactly(user)
- end
-
- it 'finds a user when login is an email address regardless of case' do
- expect(described_class.by_id_and_login(user.id, user.email.upcase))
- .to contain_exactly(user)
- end
- end
-
describe '.for_todos' do
let_it_be(:user1) { create(:user) }
let_it_be(:user2) { create(:user) }
@@ -1792,9 +1779,10 @@ RSpec.describe User do
describe '#generate_password' do
it 'does not generate password by default' do
- user = create(:user, password: 'abcdefghe')
+ password = User.random_password
+ user = create(:user, password: password)
- expect(user.password).to eq('abcdefghe')
+ expect(user.password).to eq(password)
end
end
@@ -2831,162 +2819,144 @@ RSpec.describe User do
end
end
- shared_examples '.search examples' do
- describe '.search' do
- let_it_be(:user) { create(:user, name: 'user', username: 'usern', email: 'email@example.com') }
- let_it_be(:public_email) do
- create(:email, :confirmed, user: user, email: 'publicemail@example.com').tap do |email|
- user.update!(public_email: email.email)
- end
+ describe '.search' do
+ let_it_be(:user) { create(:user, name: 'user', username: 'usern', email: 'email@example.com') }
+ let_it_be(:public_email) do
+ create(:email, :confirmed, user: user, email: 'publicemail@example.com').tap do |email|
+ user.update!(public_email: email.email)
end
+ end
- let_it_be(:user2) { create(:user, name: 'user name', username: 'username', email: 'someemail@example.com') }
- let_it_be(:user3) { create(:user, name: 'us', username: 'se', email: 'foo@example.com') }
- let_it_be(:unconfirmed_user) { create(:user, :unconfirmed, name: 'not verified', username: 'notverified') }
-
- let_it_be(:unconfirmed_secondary_email) { create(:email, user: user, email: 'alias@example.com') }
- let_it_be(:confirmed_secondary_email) { create(:email, :confirmed, user: user, email: 'alias2@example.com') }
+ let_it_be(:user2) { create(:user, name: 'user name', username: 'username', email: 'someemail@example.com') }
+ let_it_be(:user3) { create(:user, name: 'us', username: 'se', email: 'foo@example.com') }
+ let_it_be(:unconfirmed_user) { create(:user, :unconfirmed, name: 'not verified', username: 'notverified') }
- describe 'name user and email relative ordering' do
- let_it_be(:named_alexander) { create(:user, name: 'Alexander Person', username: 'abcd', email: 'abcd@example.com') }
- let_it_be(:username_alexand) { create(:user, name: 'Joao Alexander', username: 'Alexand', email: 'joao@example.com') }
+ let_it_be(:unconfirmed_secondary_email) { create(:email, user: user, email: 'alias@example.com') }
+ let_it_be(:confirmed_secondary_email) { create(:email, :confirmed, user: user, email: 'alias2@example.com') }
- it 'prioritizes exact matches' do
- expect(described_class.search('Alexand')).to eq([username_alexand, named_alexander])
- end
+ describe 'name user and email relative ordering' do
+ let_it_be(:named_alexander) { create(:user, name: 'Alexander Person', username: 'abcd', email: 'abcd@example.com') }
+ let_it_be(:username_alexand) { create(:user, name: 'Joao Alexander', username: 'Alexand', email: 'joao@example.com') }
- it 'falls back to ordering by name' do
- expect(described_class.search('Alexander')).to eq([named_alexander, username_alexand])
- end
+ it 'prioritizes exact matches' do
+ expect(described_class.search('Alexand')).to eq([username_alexand, named_alexander])
end
- describe 'name matching' do
- it 'returns users with a matching name with exact match first' do
- expect(described_class.search(user.name)).to eq([user, user2])
- end
-
- it 'returns users with a partially matching name' do
- expect(described_class.search(user.name[0..2])).to eq([user, user2])
- end
-
- it 'returns users with a matching name regardless of the casing' do
- expect(described_class.search(user2.name.upcase)).to eq([user2])
- end
+ it 'falls back to ordering by name' do
+ expect(described_class.search('Alexander')).to eq([named_alexander, username_alexand])
+ end
+ end
- it 'returns users with a exact matching name shorter than 3 chars' do
- expect(described_class.search(user3.name)).to eq([user3])
- end
+ describe 'name matching' do
+ it 'returns users with a matching name with exact match first' do
+ expect(described_class.search(user.name)).to eq([user, user2])
+ end
- it 'returns users with a exact matching name shorter than 3 chars regardless of the casing' do
- expect(described_class.search(user3.name.upcase)).to eq([user3])
- end
+ it 'returns users with a partially matching name' do
+ expect(described_class.search(user.name[0..2])).to eq([user, user2])
+ end
- context 'when use_minimum_char_limit is false' do
- it 'returns users with a partially matching name' do
- expect(described_class.search('u', use_minimum_char_limit: false)).to eq([user3, user, user2])
- end
- end
+ it 'returns users with a matching name regardless of the casing' do
+ expect(described_class.search(user2.name.upcase)).to eq([user2])
end
- describe 'email matching' do
- it 'returns users with a matching public email' do
- expect(described_class.search(user.public_email)).to match_array([user])
- end
+ it 'returns users with a exact matching name shorter than 3 chars' do
+ expect(described_class.search(user3.name)).to eq([user3])
+ end
- it 'does not return users with a partially matching public email' do
- expect(described_class.search(user.public_email[1...-1])).to be_empty
- end
+ it 'returns users with a exact matching name shorter than 3 chars regardless of the casing' do
+ expect(described_class.search(user3.name.upcase)).to eq([user3])
+ end
- it 'returns users with a matching public email regardless of the casing' do
- expect(described_class.search(user.public_email.upcase)).to match_array([user])
+ context 'when use_minimum_char_limit is false' do
+ it 'returns users with a partially matching name' do
+ expect(described_class.search('u', use_minimum_char_limit: false)).to eq([user3, user, user2])
end
+ end
+ end
- it 'does not return users with a matching private email' do
- expect(described_class.search(user.email)).to be_empty
-
- expect(described_class.search(unconfirmed_secondary_email.email)).to be_empty
- expect(described_class.search(confirmed_secondary_email.email)).to be_empty
- end
+ describe 'email matching' do
+ it 'returns users with a matching public email' do
+ expect(described_class.search(user.public_email)).to match_array([user])
+ end
- context 'with private emails search' do
- it 'returns users with matching private primary email' do
- expect(described_class.search(user.email, with_private_emails: true)).to match_array([user])
- end
+ it 'does not return users with a partially matching public email' do
+ expect(described_class.search(user.public_email[1...-1])).to be_empty
+ end
- it 'returns users with matching private unconfirmed primary email' do
- expect(described_class.search(unconfirmed_user.email, with_private_emails: true)).to match_array([unconfirmed_user])
- end
+ it 'returns users with a matching public email regardless of the casing' do
+ expect(described_class.search(user.public_email.upcase)).to match_array([user])
+ end
- it 'returns users with matching private confirmed secondary email' do
- expect(described_class.search(confirmed_secondary_email.email, with_private_emails: true)).to match_array([user])
- end
+ it 'does not return users with a matching private email' do
+ expect(described_class.search(user.email)).to be_empty
- it 'does not return users with matching private unconfirmed secondary email' do
- expect(described_class.search(unconfirmed_secondary_email.email, with_private_emails: true)).to be_empty
- end
- end
+ expect(described_class.search(unconfirmed_secondary_email.email)).to be_empty
+ expect(described_class.search(confirmed_secondary_email.email)).to be_empty
end
- describe 'username matching' do
- it 'returns users with a matching username' do
- expect(described_class.search(user.username)).to eq([user, user2])
+ context 'with private emails search' do
+ it 'returns users with matching private primary email' do
+ expect(described_class.search(user.email, with_private_emails: true)).to match_array([user])
end
- it 'returns users with a matching username starting with a @' do
- expect(described_class.search("@#{user.username}")).to eq([user, user2])
+ it 'returns users with matching private unconfirmed primary email' do
+ expect(described_class.search(unconfirmed_user.email, with_private_emails: true)).to match_array([unconfirmed_user])
end
- it 'returns users with a partially matching username' do
- expect(described_class.search(user.username[0..2])).to eq([user, user2])
+ it 'returns users with matching private confirmed secondary email' do
+ expect(described_class.search(confirmed_secondary_email.email, with_private_emails: true)).to match_array([user])
end
- it 'returns users with a partially matching username starting with @' do
- expect(described_class.search("@#{user.username[0..2]}")).to eq([user, user2])
+ it 'does not return users with matching private unconfirmed secondary email' do
+ expect(described_class.search(unconfirmed_secondary_email.email, with_private_emails: true)).to be_empty
end
+ end
+ end
- it 'returns users with a matching username regardless of the casing' do
- expect(described_class.search(user2.username.upcase)).to eq([user2])
- end
+ describe 'username matching' do
+ it 'returns users with a matching username' do
+ expect(described_class.search(user.username)).to eq([user, user2])
+ end
- it 'returns users with a exact matching username shorter than 3 chars' do
- expect(described_class.search(user3.username)).to eq([user3])
- end
+ it 'returns users with a matching username starting with a @' do
+ expect(described_class.search("@#{user.username}")).to eq([user, user2])
+ end
- it 'returns users with a exact matching username shorter than 3 chars regardless of the casing' do
- expect(described_class.search(user3.username.upcase)).to eq([user3])
- end
+ it 'returns users with a partially matching username' do
+ expect(described_class.search(user.username[0..2])).to eq([user, user2])
+ end
- context 'when use_minimum_char_limit is false' do
- it 'returns users with a partially matching username' do
- expect(described_class.search('se', use_minimum_char_limit: false)).to eq([user3, user, user2])
- end
- end
+ it 'returns users with a partially matching username starting with @' do
+ expect(described_class.search("@#{user.username[0..2]}")).to eq([user, user2])
end
- it 'returns no matches for an empty string' do
- expect(described_class.search('')).to be_empty
+ it 'returns users with a matching username regardless of the casing' do
+ expect(described_class.search(user2.username.upcase)).to eq([user2])
end
- it 'returns no matches for nil' do
- expect(described_class.search(nil)).to be_empty
+ it 'returns users with a exact matching username shorter than 3 chars' do
+ expect(described_class.search(user3.username)).to eq([user3])
end
- end
- end
- context 'when the use_keyset_aware_user_search_query FF is on' do
- before do
- stub_feature_flags(use_keyset_aware_user_search_query: true)
- end
+ it 'returns users with a exact matching username shorter than 3 chars regardless of the casing' do
+ expect(described_class.search(user3.username.upcase)).to eq([user3])
+ end
- it_behaves_like '.search examples'
- end
+ context 'when use_minimum_char_limit is false' do
+ it 'returns users with a partially matching username' do
+ expect(described_class.search('se', use_minimum_char_limit: false)).to eq([user3, user, user2])
+ end
+ end
+ end
- context 'when the use_keyset_aware_user_search_query FF is off' do
- before do
- stub_feature_flags(use_keyset_aware_user_search_query: false)
+ it 'returns no matches for an empty string' do
+ expect(described_class.search('')).to be_empty
end
- it_behaves_like '.search examples'
+ it 'returns no matches for nil' do
+ expect(described_class.search(nil)).to be_empty
+ end
end
describe '.user_search_minimum_char_limit' do
@@ -3019,17 +2989,53 @@ RSpec.describe User do
end
end
+ shared_examples "find user by login" do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:invalid_login) { "#{user.username}-NOT-EXISTS" }
+
+ context 'when login is nil or empty' do
+ it 'returns nil' do
+ expect(login_method(nil)).to be_nil
+ expect(login_method('')).to be_nil
+ end
+ end
+
+ context 'when login is invalid' do
+ it 'returns nil' do
+ expect(login_method(invalid_login)).to be_nil
+ end
+ end
+
+ context 'when login is username' do
+ it 'returns user' do
+ expect(login_method(user.username)).to eq(user)
+ expect(login_method(user.username.downcase)).to eq(user)
+ expect(login_method(user.username.upcase)).to eq(user)
+ end
+ end
+
+ context 'when login is email' do
+ it 'returns user' do
+ expect(login_method(user.email)).to eq(user)
+ expect(login_method(user.email.downcase)).to eq(user)
+ expect(login_method(user.email.upcase)).to eq(user)
+ end
+ end
+ end
+
describe '.by_login' do
- let(:username) { 'John' }
- let!(:user) { create(:user, username: username) }
+ it_behaves_like "find user by login" do
+ def login_method(login)
+ described_class.by_login(login).take
+ end
+ end
+ end
- it 'gets the correct user' do
- expect(described_class.by_login(user.email.upcase)).to eq user
- expect(described_class.by_login(user.email)).to eq user
- expect(described_class.by_login(username.downcase)).to eq user
- expect(described_class.by_login(username)).to eq user
- expect(described_class.by_login(nil)).to be_nil
- expect(described_class.by_login('')).to be_nil
+ describe '.find_by_login' do
+ it_behaves_like "find user by login" do
+ def login_method(login)
+ described_class.find_by_login(login)
+ end
end
end
@@ -5120,7 +5126,6 @@ RSpec.describe User do
expect(cache_mock).to receive(:delete).with(['users', user.id, 'assigned_open_merge_requests_count'])
expect(cache_mock).to receive(:delete).with(['users', user.id, 'review_requested_open_merge_requests_count'])
- expect(cache_mock).to receive(:delete).with(['users', user.id, 'attention_requested_open_merge_requests_count'])
allow(Rails).to receive(:cache).and_return(cache_mock)
@@ -5128,20 +5133,6 @@ RSpec.describe User do
end
end
- describe '#invalidate_attention_requested_count' do
- let(:user) { build_stubbed(:user) }
-
- it 'invalidates cache for issue counter' do
- cache_mock = double
-
- expect(cache_mock).to receive(:delete).with(['users', user.id, 'attention_requested_open_merge_requests_count'])
-
- allow(Rails).to receive(:cache).and_return(cache_mock)
-
- user.invalidate_attention_requested_count
- end
- end
-
describe '#invalidate_personal_projects_count' do
let(:user) { build_stubbed(:user) }
@@ -5228,43 +5219,6 @@ RSpec.describe User do
end
end
- describe '#attention_requested_open_merge_requests_count' do
- let(:user) { create(:user) }
- let(:project) { create(:project, :public) }
- let(:archived_project) { create(:project, :public, :archived) }
-
- before do
- mr1 = create(:merge_request, source_project: project, author: user, reviewers: [user])
- mr2 = create(:merge_request, :closed, source_project: project, author: user, reviewers: [user])
- mr3 = create(:merge_request, source_project: archived_project, author: user, reviewers: [user])
-
- mr1.find_reviewer(user).update!(state: :attention_requested)
- mr2.find_reviewer(user).update!(state: :attention_requested)
- mr3.find_reviewer(user).update!(state: :attention_requested)
- end
-
- it 'returns number of open merge requests from non-archived projects' do
- expect(Rails.cache).not_to receive(:fetch)
- expect(user.attention_requested_open_merge_requests_count(force: true)).to eq 1
- end
-
- context 'when uncached_mr_attention_requests_count is disabled' do
- before do
- stub_feature_flags(uncached_mr_attention_requests_count: false)
- end
-
- it 'fetches from cache' do
- expect(Rails.cache).to receive(:fetch).with(
- user.attention_request_cache_key,
- force: false,
- expires_in: described_class::COUNT_CACHE_VALIDITY_PERIOD
- ).and_call_original
-
- expect(user.attention_requested_open_merge_requests_count).to eq 1
- end
- end
- end
-
describe '#assigned_open_issues_count' do
it 'returns number of open issues from non-archived projects' do
user = create(:user)
@@ -6158,8 +6112,9 @@ RSpec.describe User do
end
context 'user with a bcrypt password hash' do
- # Plaintext password 'eiFubohV6iro'
- let(:encrypted_password) { '$2a$10$xLTxCKOa75IU4RQGqqOrTuZOgZdJEzfSzjG6ZSEi/C31TB/yLZYpi' }
+ # Manually set a 'known' encrypted password
+ let(:password) { User.random_password }
+ let(:encrypted_password) { Devise::Encryptor.digest(User, password) }
let(:user) { create(:user, encrypted_password: encrypted_password) }
shared_examples 'not re-encrypting with PBKDF2' do
@@ -6171,9 +6126,12 @@ RSpec.describe User do
end
context 'using the wrong password' do
+ # password 'WRONG PASSWORD' will not match the bcrypt hash
let(:password) { 'WRONG PASSWORD' }
+ let(:encrypted_password) { Devise::Encryptor.digest(User, User.random_password) }
it { is_expected.to be_falsey }
+
it_behaves_like 'not re-encrypting with PBKDF2'
context 'when pbkdf2_password_encryption is disabled' do
@@ -6182,13 +6140,12 @@ RSpec.describe User do
end
it { is_expected.to be_falsey }
+
it_behaves_like 'not re-encrypting with PBKDF2'
end
end
context 'using the correct password' do
- let(:password) { 'eiFubohV6iro' }
-
it { is_expected.to be_truthy }
it 'validates the password and re-encrypts with PBKDF2' do
@@ -6207,6 +6164,7 @@ RSpec.describe User do
end
it { is_expected.to be_truthy }
+
it_behaves_like 'not re-encrypting with PBKDF2'
end
@@ -6216,14 +6174,18 @@ RSpec.describe User do
end
it { is_expected.to be_truthy }
+
it_behaves_like 'not re-encrypting with PBKDF2'
end
end
end
context 'user with password hash that is neither PBKDF2 nor BCrypt' do
- let(:user) { create(:user, encrypted_password: '$argon2i$v=19$m=512,t=4,p=2$eM+ZMyYkpDRGaI3xXmuNcQ$c5DeJg3eb5dskVt1mDdxfw') }
- let(:password) { 'password' }
+ # Manually calculated User.random_password
+ let(:password) { "gg_w215TmVXGWSt7RJKXwYTVz886f6SDM3zvzztaJf2mX9ttUE8gRkNJSbWyWRLqxz4LFzxBekPe75ydDcGauE9wqg-acKMRT-WpSYjTm1Rdx-tnssE7CQByJcnxwWNH" }
+ # Created with https://argon2.online/ using 'aaaaaaaa' as the salt
+ let(:encrypted_password) { "$argon2i$v=19$m=512,t=4,p=2$YWFhYWFhYWE$PvJscKO5XRlevcgRReUg6w" }
+ let(:user) { create(:user, encrypted_password: encrypted_password) }
it { is_expected.to be_falsey }
@@ -6240,7 +6202,7 @@ RSpec.describe User do
# These entire test section can be removed once the :pbkdf2_password_encryption feature flag is removed.
describe '#password=' do
let(:user) { create(:user) }
- let(:password) { 'Oot5iechahqu' }
+ let(:password) { User.random_password }
def compare_bcrypt_password(user, password)
Devise::Encryptor.compare(User, user.encrypted_password, password)
@@ -6422,7 +6384,7 @@ RSpec.describe User do
end
context 'when password_automatically_set is true' do
- let(:user) { create(:omniauth_user, provider: 'ldap')}
+ let(:user) { create(:omniauth_user, provider: 'ldap') }
it_behaves_like 'password expired not applicable'
end
@@ -6701,6 +6663,40 @@ RSpec.describe User do
end
end
+ describe '#dismissed_callout_for_project?' do
+ let_it_be(:user, refind: true) { create(:user) }
+ let_it_be(:project) { create(:project) }
+ let_it_be(:feature_name) { Users::ProjectCallout.feature_names.each_key.first }
+
+ context 'when no callout dismissal record exists' do
+ it 'returns false when no ignore_dismissal_earlier_than provided' do
+ expect(user.dismissed_callout_for_project?(feature_name: feature_name, project: project)).to eq false
+ end
+ end
+
+ context 'when dismissed callout exists' do
+ before_all do
+ create(:project_callout,
+ user: user,
+ project_id: project.id,
+ feature_name: feature_name,
+ dismissed_at: 4.months.ago)
+ end
+
+ it 'returns true when no ignore_dismissal_earlier_than provided' do
+ expect(user.dismissed_callout_for_project?(feature_name: feature_name, project: project)).to eq true
+ end
+
+ it 'returns true when ignore_dismissal_earlier_than is earlier than dismissed_at' do
+ expect(user.dismissed_callout_for_project?(feature_name: feature_name, project: project, ignore_dismissal_earlier_than: 6.months.ago)).to eq true
+ end
+
+ it 'returns false when ignore_dismissal_earlier_than is later than dismissed_at' do
+ expect(user.dismissed_callout_for_project?(feature_name: feature_name, project: project, ignore_dismissal_earlier_than: 3.months.ago)).to eq false
+ end
+ end
+ end
+
describe '#find_or_initialize_group_callout' do
let_it_be(:user, refind: true) { create(:user) }
let_it_be(:group) { create(:group) }
@@ -6745,6 +6741,50 @@ RSpec.describe User do
end
end
+ describe '#find_or_initialize_project_callout' do
+ let_it_be(:user, refind: true) { create(:user) }
+ let_it_be(:project) { create(:project) }
+ let_it_be(:feature_name) { Users::ProjectCallout.feature_names.each_key.first }
+
+ subject(:callout_with_source) do
+ user.find_or_initialize_project_callout(feature_name, project.id)
+ end
+
+ context 'when callout exists' do
+ let!(:callout) do
+ create(:project_callout, user: user, feature_name: feature_name, project_id: project.id)
+ end
+
+ it 'returns existing callout' do
+ expect(callout_with_source).to eq(callout)
+ end
+ end
+
+ context 'when callout does not exist' do
+ context 'when feature name is valid' do
+ it 'initializes a new callout' do
+ expect(callout_with_source).to be_a_new(Users::ProjectCallout)
+ end
+
+ it 'is valid' do
+ expect(callout_with_source).to be_valid
+ end
+ end
+
+ context 'when feature name is not valid' do
+ let(:feature_name) { 'notvalid' }
+
+ it 'initializes a new callout' do
+ expect(callout_with_source).to be_a_new(Users::ProjectCallout)
+ end
+
+ it 'is not valid' do
+ expect(callout_with_source).not_to be_valid
+ end
+ end
+ end
+ end
+
describe '#hook_attrs' do
let(:user) { create(:user) }
let(:user_attributes) do
@@ -7374,4 +7414,12 @@ RSpec.describe User do
expect(another_user.mr_attention_requests_enabled?).to be(false)
end
end
+
+ describe 'user age' do
+ let(:user) { create(:user, created_at: Date.yesterday) }
+
+ it 'returns age in days' do
+ expect(user.account_age_in_days).to be(1)
+ end
+ end
end
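Note on the trailing user_spec hunk above: a user created yesterday should report an account age of one day. A one-line sketch, assuming the helper simply diffs dates:

    # Sketch only; assumed implementation of User#account_age_in_days.
    def account_age_in_days
      (Date.current - created_at.to_date).to_i
    end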
diff --git a/spec/models/user_status_spec.rb b/spec/models/user_status_spec.rb
index 87d1fa14aca..663df9712ab 100644
--- a/spec/models/user_status_spec.rb
+++ b/spec/models/user_status_spec.rb
@@ -47,4 +47,30 @@ RSpec.describe UserStatus do
end
end
end
+
+ describe '#customized?' do
+ it 'is customized when message text is present' do
+ subject.message = 'My custom status'
+
+ expect(subject).to be_customized
+ end
+
+ it 'is not customized when message text is absent' do
+ subject.message = nil
+
+ expect(subject).not_to be_customized
+ end
+
+ it 'is customized without message but with custom emoji' do
+ subject.emoji = 'bow'
+
+ expect(subject).to be_customized
+ end
+
+ it 'is not customized without message but with default custom emoji' do
+ subject.emoji = 'speech_balloon'
+
+ expect(subject).not_to be_customized
+ end
+ end
end
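Note on the user_status hunk above: a status counts as customized when it has message text or a non-default emoji, with speech_balloon acting as the default. A sketch covering those four cases; the constant name is an assumption:

    # Sketch only; assumed shape of UserStatus#customized?.
    DEFAULT_EMOJI = 'speech_balloon' # default emoji per the spec above

    def customized?
      message.present? || emoji != DEFAULT_EMOJI
    end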
diff --git a/spec/models/users/calloutable_spec.rb b/spec/models/users/calloutable_spec.rb
index 01603d8bbd6..791fe1c1bc4 100644
--- a/spec/models/users/calloutable_spec.rb
+++ b/spec/models/users/calloutable_spec.rb
@@ -15,8 +15,8 @@ RSpec.describe Users::Calloutable do
describe '#dismissed_after?' do
let(:some_feature_name) { Users::Callout.feature_names.keys.second }
- let(:callout_dismissed_month_ago) { create(:callout, feature_name: some_feature_name, dismissed_at: 1.month.ago )}
- let(:callout_dismissed_day_ago) { create(:callout, feature_name: some_feature_name, dismissed_at: 1.day.ago )}
+ let(:callout_dismissed_month_ago) { create(:callout, feature_name: some_feature_name, dismissed_at: 1.month.ago ) }
+ let(:callout_dismissed_day_ago) { create(:callout, feature_name: some_feature_name, dismissed_at: 1.day.ago ) }
it 'returns whether a callout dismissed after specified date' do
expect(callout_dismissed_month_ago.dismissed_after?(15.days.ago)).to eq(false)
diff --git a/spec/models/users/in_product_marketing_email_spec.rb b/spec/models/users/in_product_marketing_email_spec.rb
index 7796b54babc..78de9ad8bdb 100644
--- a/spec/models/users/in_product_marketing_email_spec.rb
+++ b/spec/models/users/in_product_marketing_email_spec.rb
@@ -18,6 +18,7 @@ RSpec.describe Users::InProductMarketingEmail, type: :model do
context 'for a track+series email' do
it { is_expected.to validate_presence_of(:track) }
it { is_expected.to validate_presence_of(:series) }
+
it {
is_expected.to validate_uniqueness_of(:user_id)
.scoped_to([:track, :series]).with_message('track series email has already been sent')
@@ -30,10 +31,12 @@ RSpec.describe Users::InProductMarketingEmail, type: :model do
it { is_expected.to validate_presence_of(:campaign) }
it { is_expected.not_to validate_presence_of(:track) }
it { is_expected.not_to validate_presence_of(:series) }
+
it {
is_expected.to validate_uniqueness_of(:user_id)
.scoped_to(:campaign).with_message('campaign email has already been sent')
}
+
it { is_expected.to validate_inclusion_of(:campaign).in_array(described_class::CAMPAIGNS) }
end
diff --git a/spec/models/users/project_callout_spec.rb b/spec/models/users/project_callout_spec.rb
new file mode 100644
index 00000000000..87d865c4bdf
--- /dev/null
+++ b/spec/models/users/project_callout_spec.rb
@@ -0,0 +1,24 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Users::ProjectCallout do
+ let_it_be(:user) { create_default(:user) }
+ let_it_be(:project) { create_default(:project) }
+ let_it_be(:callout) { create(:project_callout) }
+
+ it_behaves_like 'having unique enum values'
+
+ describe 'relationships' do
+ it { is_expected.to belong_to(:project) }
+ end
+
+ describe 'validations' do
+ it { is_expected.to validate_presence_of(:project) }
+ it { is_expected.to validate_presence_of(:feature_name) }
+
+ it {
+ is_expected.to validate_uniqueness_of(:feature_name).scoped_to(:user_id, :project_id).ignoring_case_sensitivity
+ }
+ end
+end
diff --git a/spec/models/webauthn_registration_spec.rb b/spec/models/webauthn_registration_spec.rb
index 6813854bf6c..240e7002ca3 100644
--- a/spec/models/webauthn_registration_spec.rb
+++ b/spec/models/webauthn_registration_spec.rb
@@ -13,6 +13,7 @@ RSpec.describe WebauthnRegistration do
it { is_expected.to validate_presence_of(:counter) }
it { is_expected.to validate_length_of(:name).is_at_least(0) }
it { is_expected.not_to allow_value(nil).for(:name) }
+
it do
is_expected.to validate_numericality_of(:counter)
.only_integer
diff --git a/spec/models/wiki_page/meta_spec.rb b/spec/models/wiki_page/meta_spec.rb
index 37a282657d9..4d1a2dc1c98 100644
--- a/spec/models/wiki_page/meta_spec.rb
+++ b/spec/models/wiki_page/meta_spec.rb
@@ -89,7 +89,7 @@ RSpec.describe WikiPage::Meta do
shared_examples 'canonical_slug setting examples' do
# Constant overhead of two queries for the transaction
let(:upper_query_limit) { query_limit + 2 }
- let(:lower_query_limit) { [upper_query_limit - 1, 0].max}
+ let(:lower_query_limit) { [upper_query_limit - 1, 0].max }
let(:other_slug) { generate(:sluggified_title) }
it 'changes it to the correct value' do
diff --git a/spec/models/work_item_spec.rb b/spec/models/work_item_spec.rb
index f33c8e0a186..e2240c225a9 100644
--- a/spec/models/work_item_spec.rb
+++ b/spec/models/work_item_spec.rb
@@ -40,10 +40,13 @@ RSpec.describe WorkItem do
subject { build(:work_item).widgets }
it 'returns instances of supported widgets' do
- is_expected.to match_array([instance_of(WorkItems::Widgets::Description),
- instance_of(WorkItems::Widgets::Hierarchy),
- instance_of(WorkItems::Widgets::Assignees),
- instance_of(WorkItems::Widgets::Weight)])
+ is_expected.to include(
+ instance_of(WorkItems::Widgets::Description),
+ instance_of(WorkItems::Widgets::Hierarchy),
+ instance_of(WorkItems::Widgets::Labels),
+ instance_of(WorkItems::Widgets::Assignees),
+ instance_of(WorkItems::Widgets::StartAndDueDate)
+ )
end
end
@@ -107,5 +110,61 @@ RSpec.describe WorkItem do
it { is_expected.to eq(false) }
end
end
+
+ describe 'confidentiality' do
+ let_it_be(:project) { create(:project) }
+
+ context 'when parent and child are confidential' do
+ let_it_be(:parent) { create(:work_item, confidential: true, project: project) }
+ let_it_be(:child) { create(:work_item, :task, confidential: true, project: project) }
+ let_it_be(:link) { create(:parent_link, work_item: child, work_item_parent: parent) }
+
+ it 'does not allow making the child non-confidential' do
+ child.confidential = false
+
+ expect(child).not_to be_valid
+ expect(child.errors[:confidential])
+ .to include('associated parent is confidential and can not have non-confidential children.')
+ end
+
+ it 'allows making the parent non-confidential' do
+ parent.confidential = false
+
+ expect(parent).to be_valid
+ end
+ end
+
+ context 'when parent and child are non-confidential' do
+ let_it_be(:parent) { create(:work_item, project: project) }
+ let_it_be(:child) { create(:work_item, :task, project: project) }
+ let_it_be(:link) { create(:parent_link, work_item: child, work_item_parent: parent) }
+
+ it 'does not allow making the parent confidential' do
+ parent.confidential = true
+
+ expect(parent).not_to be_valid
+ expect(parent.errors[:confidential])
+ .to include('confidential parent can not be used if there are non-confidential children.')
+ end
+
+ it 'allows making the child confidential' do
+ child.confidential = true
+
+ expect(child).to be_valid
+ end
+ end
+
+ context 'when creating new child' do
+ let_it_be(:child) { build(:work_item, project: project) }
+
+ it 'does not allow setting a confidential parent' do
+ child.work_item_parent = create(:work_item, confidential: true, project: project)
+
+ expect(child).not_to be_valid
+ expect(child.errors[:confidential])
+ .to include('associated parent is confidential and can not have non-confidential children.')
+ end
+ end
+ end
end
end
diff --git a/spec/models/work_items/parent_link_spec.rb b/spec/models/work_items/parent_link_spec.rb
index a16b15bbfc9..070b2eef86a 100644
--- a/spec/models/work_items/parent_link_spec.rb
+++ b/spec/models/work_items/parent_link_spec.rb
@@ -69,6 +69,70 @@ RSpec.describe WorkItems::ParentLink do
expect(link1).to be_valid
end
end
+
+ context 'when setting confidentiality' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:confidential_parent, :confidential_child, :valid) do
+ false | false | true
+ true | true | true
+ false | true | true
+ true | false | false
+ end
+
+ with_them do
+ before do
+ issue.confidential = confidential_parent
+ task1.confidential = confidential_child
+ end
+
+ it 'validates if child confidentiality is compatible with parent' do
+ link = build(:parent_link, work_item_parent: issue, work_item: task1)
+
+ expect(link.valid?).to eq(valid)
+ end
+ end
+ end
+ end
+ end
+
+ context 'with confidential work items' do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:confidential_child) { create(:work_item, :task, confidential: true, project: project) }
+ let_it_be(:public_child) { create(:work_item, :task, project: project) }
+ let_it_be(:confidential_parent) { create(:work_item, confidential: true, project: project) }
+ let_it_be(:public_parent) { create(:work_item, project: project) }
+
+ describe '.has_public_children?' do
+ subject { described_class.has_public_children?(public_parent.id) }
+
+ context 'with confidential child' do
+ let_it_be(:link) { create(:parent_link, work_item_parent: public_parent, work_item: confidential_child) }
+
+ it { is_expected.to be_falsey }
+
+ context 'with also public child' do
+ let_it_be(:link) { create(:parent_link, work_item_parent: public_parent, work_item: public_child) }
+
+ it { is_expected.to be_truthy }
+ end
+ end
+ end
+
+ describe '.has_confidential_parent?' do
+ subject { described_class.has_confidential_parent?(confidential_child.id) }
+
+ context 'with confidential parent' do
+ let_it_be(:link) { create(:parent_link, work_item_parent: confidential_parent, work_item: confidential_child) }
+
+ it { is_expected.to be_truthy }
+ end
+
+ context 'with public parent' do
+ let_it_be(:link) { create(:parent_link, work_item_parent: public_parent, work_item: confidential_child) }
+
+ it { is_expected.to be_falsey }
+ end
end
end
end
diff --git a/spec/models/work_items/type_spec.rb b/spec/models/work_items/type_spec.rb
index e91617effc0..e41df7f0f61 100644
--- a/spec/models/work_items/type_spec.rb
+++ b/spec/models/work_items/type_spec.rb
@@ -64,10 +64,13 @@ RSpec.describe WorkItems::Type do
subject { described_class.available_widgets }
it 'returns list of all possible widgets' do
- is_expected.to match_array([::WorkItems::Widgets::Description,
- ::WorkItems::Widgets::Hierarchy,
- ::WorkItems::Widgets::Assignees,
- ::WorkItems::Widgets::Weight])
+ is_expected.to include(
+ ::WorkItems::Widgets::Description,
+ ::WorkItems::Widgets::Hierarchy,
+ ::WorkItems::Widgets::Labels,
+ ::WorkItems::Widgets::Assignees,
+ ::WorkItems::Widgets::StartAndDueDate
+ )
end
end
diff --git a/spec/models/work_items/widgets/hierarchy_spec.rb b/spec/models/work_items/widgets/hierarchy_spec.rb
index ab2bcfee13f..cd528772710 100644
--- a/spec/models/work_items/widgets/hierarchy_spec.rb
+++ b/spec/models/work_items/widgets/hierarchy_spec.rb
@@ -21,7 +21,7 @@ RSpec.describe WorkItems::Widgets::Hierarchy do
end
describe '#parent' do
- let_it_be(:parent_link) { create(:parent_link, work_item: task, work_item_parent: work_item_parent) }
+ let_it_be(:parent_link) { create(:parent_link, work_item: task, work_item_parent: work_item_parent).reload }
subject { described_class.new(parent_link.work_item).parent }
@@ -45,8 +45,8 @@ RSpec.describe WorkItems::Widgets::Hierarchy do
end
describe '#children' do
- let_it_be(:parent_link1) { create(:parent_link, work_item_parent: work_item_parent, work_item: task) }
- let_it_be(:parent_link2) { create(:parent_link, work_item_parent: work_item_parent) }
+ let_it_be(:parent_link1) { create(:parent_link, work_item_parent: work_item_parent, work_item: task).reload }
+ let_it_be(:parent_link2) { create(:parent_link, work_item_parent: work_item_parent).reload }
subject { described_class.new(work_item_parent).children }
diff --git a/spec/models/work_items/widgets/labels_spec.rb b/spec/models/work_items/widgets/labels_spec.rb
new file mode 100644
index 00000000000..15e8aaa1cf3
--- /dev/null
+++ b/spec/models/work_items/widgets/labels_spec.rb
@@ -0,0 +1,31 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe WorkItems::Widgets::Labels do
+ let_it_be(:work_item) { create(:work_item, labels: [create(:label)]) }
+
+ describe '.type' do
+ subject { described_class.type }
+
+ it { is_expected.to eq(:labels) }
+ end
+
+ describe '#type' do
+ subject { described_class.new(work_item).type }
+
+ it { is_expected.to eq(:labels) }
+ end
+
+ describe '#labels' do
+ subject { described_class.new(work_item).labels }
+
+ it { is_expected.to eq(work_item.labels) }
+ end
+
+ describe '#allows_scoped_labels?' do
+ subject { described_class.new(work_item).allows_scoped_labels? }
+
+ it { is_expected.to eq(work_item.allows_scoped_labels?) }
+ end
+end
diff --git a/spec/models/work_items/widgets/start_and_due_date_spec.rb b/spec/models/work_items/widgets/start_and_due_date_spec.rb
new file mode 100644
index 00000000000..b023cc73e0f
--- /dev/null
+++ b/spec/models/work_items/widgets/start_and_due_date_spec.rb
@@ -0,0 +1,31 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe WorkItems::Widgets::StartAndDueDate do
+ let_it_be(:work_item) { create(:work_item, start_date: Date.today, due_date: 1.week.from_now) }
+
+ describe '.type' do
+ subject { described_class.type }
+
+ it { is_expected.to eq(:start_and_due_date) }
+ end
+
+ describe '#type' do
+ subject { described_class.new(work_item).type }
+
+ it { is_expected.to eq(:start_and_due_date) }
+ end
+
+ describe '#start_date' do
+ subject { described_class.new(work_item).start_date }
+
+ it { is_expected.to eq(work_item.start_date) }
+ end
+
+ describe '#due_date' do
+ subject { described_class.new(work_item).due_date }
+
+ it { is_expected.to eq(work_item.due_date) }
+ end
+end
diff --git a/spec/policies/group_policy_spec.rb b/spec/policies/group_policy_spec.rb
index 3ef859376a4..57923142648 100644
--- a/spec/policies/group_policy_spec.rb
+++ b/spec/policies/group_policy_spec.rb
@@ -4,7 +4,6 @@ require 'spec_helper'
RSpec.describe GroupPolicy do
include_context 'GroupPolicy context'
- using RSpec::Parameterized::TableSyntax
context 'public group with no user' do
let(:group) { create(:group, :public, :crm_enabled) }
@@ -1231,29 +1230,11 @@ RSpec.describe GroupPolicy do
it { is_expected.to be_disallowed(:admin_crm_organization) }
end
- describe 'maintain_namespace' do
- context 'with non-admin roles' do
- where(:role, :allowed) do
- :guest | false
- :reporter | false
- :developer | false
- :maintainer | true
- :owner | true
- end
-
- with_them do
- let(:current_user) { public_send(role) }
+ it_behaves_like 'checks timelog categories permissions' do
+ let(:group) { create(:group) }
+ let(:namespace) { group }
+ let(:users_container) { group }
- it do
- expect(subject.allowed?(:maintain_namespace)).to eq allowed
- end
- end
- end
-
- context 'as an admin', :enable_admin_mode do
- let(:current_user) { admin }
-
- it { is_expected.to be_allowed(:maintain_namespace) }
- end
+ subject { described_class.new(current_user, group) }
end
end
diff --git a/spec/policies/issuable_policy_spec.rb b/spec/policies/issuable_policy_spec.rb
index 5e2a307e959..706570babd5 100644
--- a/spec/policies/issuable_policy_spec.rb
+++ b/spec/policies/issuable_policy_spec.rb
@@ -113,5 +113,45 @@ RSpec.describe IssuablePolicy, models: true do
end
end
end
+
+ context 'when user is anonymous' do
+ it 'does not allow timelogs creation' do
+ expect(permissions(nil, issue)).to be_disallowed(:create_timelog)
+ end
+ end
+
+ context 'when user is not a member of the project' do
+ it 'does not allow timelogs creation' do
+ expect(policies).to be_disallowed(:create_timelog)
+ end
+ end
+
+ context 'when user is not a member of the project but the author of the issuable' do
+ let(:issue) { create(:issue, project: project, author: user) }
+
+ it 'does not allow timelogs creation' do
+ expect(policies).to be_disallowed(:create_timelog)
+ end
+ end
+
+ context 'when user is a guest member of the project' do
+ it 'does not allow timelogs creation' do
+ expect(permissions(guest, issue)).to be_disallowed(:create_timelog)
+ end
+ end
+
+ context 'when user is a guest member of the project and the author of the issuable' do
+ let(:issue) { create(:issue, project: project, author: guest) }
+
+ it 'does not allow timelogs creation' do
+ expect(permissions(guest, issue)).to be_disallowed(:create_timelog)
+ end
+ end
+
+ context 'when user is at least reporter of the project' do
+ it 'allows timelogs creation' do
+ expect(permissions(reporter, issue)).to be_allowed(:create_timelog)
+ end
+ end
end
end
diff --git a/spec/policies/issue_policy_spec.rb b/spec/policies/issue_policy_spec.rb
index fefbb59a830..7ca4baddb79 100644
--- a/spec/policies/issue_policy_spec.rb
+++ b/spec/policies/issue_policy_spec.rb
@@ -3,7 +3,9 @@
require 'spec_helper'
RSpec.describe IssuePolicy do
+ include_context 'ProjectPolicyTable context'
include ExternalAuthorizationServiceHelpers
+ include ProjectHelpers
let(:guest) { create(:user) }
let(:author) { create(:user) }
@@ -50,6 +52,19 @@ RSpec.describe IssuePolicy do
end
end
+ shared_examples 'grants the expected permissions' do |policy|
+ specify do
+ enable_admin_mode!(user) if admin_mode
+ update_feature_access_level(project, feature_access_level)
+
+ if expected_count == 1
+ expect(permissions(user, issue)).to be_allowed(policy)
+ else
+ expect(permissions(user, issue)).to be_disallowed(policy)
+ end
+ end
+ end
+
context 'a private project' do
let(:project) { create(:project, :private) }
let(:issue) { create(:issue, project: project, assignees: [assignee], author: author) }
@@ -85,7 +100,6 @@ RSpec.describe IssuePolicy do
it 'allows reporters from group links to read, update, and admin issues' do
expect(permissions(reporter_from_group_link, issue)).to be_allowed(:read_issue, :read_issue_iid, :update_issue, :admin_issue, :set_issue_metadata, :set_confidentiality)
- expect(permissions(reporter_from_group_link, issue_no_assignee)).to be_allowed(:read_issue, :read_issue_iid, :update_issue, :admin_issue, :set_issue_metadata, :set_confidentiality)
expect(permissions(reporter_from_group_link, new_issue)).to be_allowed(:create_issue, :set_issue_metadata, :set_confidentiality)
end
@@ -217,7 +231,7 @@ RSpec.describe IssuePolicy do
it 'allows reporters from group links to read, update, reopen and admin issues' do
expect(permissions(reporter_from_group_link, issue)).to be_allowed(:read_issue, :read_issue_iid, :update_issue, :admin_issue, :reopen_issue, :set_issue_metadata, :set_confidentiality)
- expect(permissions(reporter_from_group_link, issue_no_assignee)).to be_allowed(:read_issue, :read_issue_iid, :update_issue, :admin_issue, :reopen_issue, :set_issue_metadata, :set_confidentiality)
+ expect(permissions(reporter_from_group_link, issue_no_assignee)).to be_allowed(:reopen_issue)
expect(permissions(reporter_from_group_link, issue_locked)).to be_allowed(:read_issue, :read_issue_iid, :update_issue, :admin_issue, :set_issue_metadata, :set_confidentiality)
expect(permissions(reporter_from_group_link, issue_locked)).to be_disallowed(:reopen_issue)
expect(permissions(reporter, new_issue)).to be_allowed(:create_issue, :set_issue_metadata, :set_confidentiality)
@@ -295,18 +309,23 @@ RSpec.describe IssuePolicy do
it 'forbids visitors from viewing issues' do
expect(permissions(visitor, issue)).to be_disallowed(:read_issue)
end
+
it 'forbids visitors from commenting' do
expect(permissions(visitor, issue)).to be_disallowed(:create_note)
end
+
it 'forbids visitors from subscribing' do
expect(permissions(visitor, issue)).to be_disallowed(:update_subscription)
end
+
it 'allows guests to view' do
expect(permissions(guest, issue)).to be_allowed(:read_issue)
end
+
it 'allows guests to comment' do
expect(permissions(guest, issue)).to be_allowed(:create_note)
end
+
it 'allows guests to subscribe' do
expect(permissions(guest, issue)).to be_allowed(:update_subscription)
end
@@ -454,7 +473,7 @@ RSpec.describe IssuePolicy do
end
end
- context 'when peronsal namespace' do
+ context 'when personal namespace' do
let(:project) { create(:project) }
it 'is disallowed' do
@@ -465,4 +484,34 @@ RSpec.describe IssuePolicy do
end
end
end
+
+ context 'when user is an inherited member from the group' do
+ let(:user) { create_user_from_membership(group, membership) }
+ let(:project) { create(:project, project_level, group: group) }
+ let(:issue) { create(:issue, project: project) }
+
+ context 'and policy allows guest access' do
+ where(:project_level, :feature_access_level, :membership, :admin_mode, :expected_count) do
+ permission_table_for_guest_feature_access
+ end
+
+ with_them do
+ it_behaves_like 'grants the expected permissions', :read_issue
+ it_behaves_like 'grants the expected permissions', :read_issue_iid
+ end
+ end
+
+ context 'and policy allows reporter access' do
+ where(:project_level, :feature_access_level, :membership, :admin_mode, :expected_count) do
+ permission_table_for_reporter_issue_access
+ end
+
+ with_them do
+ it_behaves_like 'grants the expected permissions', :update_issue
+ it_behaves_like 'grants the expected permissions', :admin_issue
+ it_behaves_like 'grants the expected permissions', :set_issue_metadata
+ it_behaves_like 'grants the expected permissions', :set_confidentiality
+ end
+ end
+ end
end
diff --git a/spec/policies/namespaces/project_namespace_policy_spec.rb b/spec/policies/namespaces/project_namespace_policy_spec.rb
index 5ceea9dfb9d..4519f44a6ad 100644
--- a/spec/policies/namespaces/project_namespace_policy_spec.rb
+++ b/spec/policies/namespaces/project_namespace_policy_spec.rb
@@ -3,45 +3,11 @@
require 'spec_helper'
RSpec.describe Namespaces::ProjectNamespacePolicy do
- let_it_be(:parent) { create(:namespace) }
- let_it_be(:project) { create(:project, namespace: parent) }
- let_it_be(:namespace) { project.project_namespace }
-
- let(:permissions) do
- [:owner_access, :create_projects, :admin_namespace, :read_namespace,
- :read_statistics, :transfer_projects, :admin_package,
- :create_jira_connect_subscription]
- end
-
subject { described_class.new(current_user, namespace) }
- context 'with no user' do
- let_it_be(:current_user) { nil }
-
- it { is_expected.to be_disallowed(*permissions) }
- end
-
- context 'regular user' do
- let_it_be(:current_user) { create(:user) }
-
- it { is_expected.to be_disallowed(*permissions) }
- end
-
- context 'parent owner' do
- let_it_be(:current_user) { parent.first_owner }
-
- it { is_expected.to be_disallowed(*permissions) }
- end
-
- context 'admin' do
- let_it_be(:current_user) { create(:admin) }
-
- context 'when admin mode is enabled', :enable_admin_mode do
- it { is_expected.to be_disallowed(*permissions) }
- end
-
- context 'when admin mode is disabled' do
- it { is_expected.to be_disallowed(*permissions) }
- end
+ it_behaves_like 'checks timelog categories permissions' do
+ let(:project) { create(:project) }
+ let(:namespace) { project.project_namespace }
+ let(:users_container) { project }
end
end
diff --git a/spec/policies/namespaces/user_namespace_policy_spec.rb b/spec/policies/namespaces/user_namespace_policy_spec.rb
index e8a3c9b828d..22c3f6a6d67 100644
--- a/spec/policies/namespaces/user_namespace_policy_spec.rb
+++ b/spec/policies/namespaces/user_namespace_policy_spec.rb
@@ -8,7 +8,7 @@ RSpec.describe Namespaces::UserNamespacePolicy do
let_it_be(:admin) { create(:admin) }
let_it_be(:namespace) { create(:user_namespace, owner: owner) }
- let(:owner_permissions) { [:owner_access, :create_projects, :admin_namespace, :read_namespace, :read_statistics, :transfer_projects, :admin_package, :maintain_namespace] }
+ let(:owner_permissions) { [:owner_access, :create_projects, :admin_namespace, :read_namespace, :read_statistics, :transfer_projects, :admin_package] }
subject { described_class.new(current_user, namespace) }
diff --git a/spec/policies/project_hook_policy_spec.rb b/spec/policies/project_hook_policy_spec.rb
new file mode 100644
index 00000000000..cfa7b6ee4bf
--- /dev/null
+++ b/spec/policies/project_hook_policy_spec.rb
@@ -0,0 +1,31 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ProjectHookPolicy do
+ let_it_be(:user) { create(:user) }
+
+ let(:hook) { create(:project_hook) }
+
+ subject(:policy) { described_class.new(user, hook) }
+
+ context 'when the user is not a maintainer' do
+ before do
+ hook.project.add_developer(user)
+ end
+
+ it "cannot read and destroy web-hooks" do
+ expect(policy).to be_disallowed(:read_web_hook, :destroy_web_hook)
+ end
+ end
+
+ context 'when the user is a maintainer' do
+ before do
+ hook.project.add_maintainer(user)
+ end
+
+ it "can read and destroy web-hooks" do
+ expect(policy).to be_allowed(:read_web_hook, :destroy_web_hook)
+ end
+ end
+end
diff --git a/spec/policies/project_policy_spec.rb b/spec/policies/project_policy_spec.rb
index c041c72a0be..e8fdf9a8e25 100644
--- a/spec/policies/project_policy_spec.rb
+++ b/spec/policies/project_policy_spec.rb
@@ -1930,6 +1930,10 @@ RSpec.describe ProjectPolicy do
describe 'operations feature' do
using RSpec::Parameterized::TableSyntax
+ before do
+ stub_feature_flags(split_operations_visibility_permissions: false)
+ end
+
let(:guest_operations_permissions) { [:read_environment, :read_deployment] }
let(:developer_operations_permissions) do
@@ -2002,38 +2006,234 @@ RSpec.describe ProjectPolicy do
end
end
- def project_subject(project_type)
- case project_type
- when :public
- public_project
- when :internal
- internal_project
+ def permissions_abilities(role)
+ case role
+ when :maintainer
+ maintainer_operations_permissions
+ when :developer
+ developer_operations_permissions
else
- private_project
+ guest_operations_permissions
end
end
+ end
+ end
- def user_subject(role)
- case role
- when :maintainer
- maintainer
- when :developer
- developer
- when :guest
- guest
- when :anonymous
- anonymous
+ describe 'environments feature' do
+ using RSpec::Parameterized::TableSyntax
+
+ let(:guest_environments_permissions) { [:read_environment, :read_deployment] }
+
+ let(:developer_environments_permissions) do
+ guest_environments_permissions + [
+ :create_environment, :create_deployment, :update_environment, :update_deployment, :destroy_environment
+ ]
+ end
+
+ let(:maintainer_environments_permissions) do
+ developer_environments_permissions + [:admin_environment, :admin_deployment]
+ end
+
+ where(:project_visibility, :access_level, :role, :allowed) do
+ :public | ProjectFeature::ENABLED | :maintainer | true
+ :public | ProjectFeature::ENABLED | :developer | true
+ :public | ProjectFeature::ENABLED | :guest | true
+ :public | ProjectFeature::ENABLED | :anonymous | true
+ :public | ProjectFeature::PRIVATE | :maintainer | true
+ :public | ProjectFeature::PRIVATE | :developer | true
+ :public | ProjectFeature::PRIVATE | :guest | true
+ :public | ProjectFeature::PRIVATE | :anonymous | false
+ :public | ProjectFeature::DISABLED | :maintainer | false
+ :public | ProjectFeature::DISABLED | :developer | false
+ :public | ProjectFeature::DISABLED | :guest | false
+ :public | ProjectFeature::DISABLED | :anonymous | false
+ :internal | ProjectFeature::ENABLED | :maintainer | true
+ :internal | ProjectFeature::ENABLED | :developer | true
+ :internal | ProjectFeature::ENABLED | :guest | true
+ :internal | ProjectFeature::ENABLED | :anonymous | false
+ :internal | ProjectFeature::PRIVATE | :maintainer | true
+ :internal | ProjectFeature::PRIVATE | :developer | true
+ :internal | ProjectFeature::PRIVATE | :guest | true
+ :internal | ProjectFeature::PRIVATE | :anonymous | false
+ :internal | ProjectFeature::DISABLED | :maintainer | false
+ :internal | ProjectFeature::DISABLED | :developer | false
+ :internal | ProjectFeature::DISABLED | :guest | false
+ :internal | ProjectFeature::DISABLED | :anonymous | false
+ :private | ProjectFeature::ENABLED | :maintainer | true
+ :private | ProjectFeature::ENABLED | :developer | true
+ :private | ProjectFeature::ENABLED | :guest | false
+ :private | ProjectFeature::ENABLED | :anonymous | false
+ :private | ProjectFeature::PRIVATE | :maintainer | true
+ :private | ProjectFeature::PRIVATE | :developer | true
+ :private | ProjectFeature::PRIVATE | :guest | false
+ :private | ProjectFeature::PRIVATE | :anonymous | false
+ :private | ProjectFeature::DISABLED | :maintainer | false
+ :private | ProjectFeature::DISABLED | :developer | false
+ :private | ProjectFeature::DISABLED | :guest | false
+ :private | ProjectFeature::DISABLED | :anonymous | false
+ end
+
+ with_them do
+ let(:current_user) { user_subject(role) }
+ let(:project) { project_subject(project_visibility) }
+
+ it 'allows/disallows the abilities based on the environments feature access level' do
+ project.project_feature.update!(environments_access_level: access_level)
+
+ if allowed
+ expect_allowed(*permissions_abilities(role))
+ else
+ expect_disallowed(*permissions_abilities(role))
end
end
def permissions_abilities(role)
case role
when :maintainer
- maintainer_operations_permissions
+ maintainer_environments_permissions
when :developer
- developer_operations_permissions
+ developer_environments_permissions
else
- guest_operations_permissions
+ guest_environments_permissions
+ end
+ end
+ end
+ end
+
+ describe 'feature flags feature' do
+ using RSpec::Parameterized::TableSyntax
+
+ let(:guest_permissions) { [] }
+
+ let(:developer_permissions) do
+ guest_permissions + [
+ :read_feature_flag, :create_feature_flag, :update_feature_flag, :destroy_feature_flag, :admin_feature_flag,
+ :admin_feature_flags_user_lists
+ ]
+ end
+
+ let(:maintainer_permissions) do
+ developer_permissions + [:admin_feature_flags_client]
+ end
+
+ where(:project_visibility, :access_level, :role, :allowed) do
+ :public | ProjectFeature::ENABLED | :maintainer | true
+ :public | ProjectFeature::ENABLED | :developer | true
+ :public | ProjectFeature::ENABLED | :guest | true
+ :public | ProjectFeature::ENABLED | :anonymous | true
+ :public | ProjectFeature::PRIVATE | :maintainer | true
+ :public | ProjectFeature::PRIVATE | :developer | true
+ :public | ProjectFeature::PRIVATE | :guest | true
+ :public | ProjectFeature::PRIVATE | :anonymous | false
+ :public | ProjectFeature::DISABLED | :maintainer | false
+ :public | ProjectFeature::DISABLED | :developer | false
+ :public | ProjectFeature::DISABLED | :guest | false
+ :public | ProjectFeature::DISABLED | :anonymous | false
+ :internal | ProjectFeature::ENABLED | :maintainer | true
+ :internal | ProjectFeature::ENABLED | :developer | true
+ :internal | ProjectFeature::ENABLED | :guest | true
+ :internal | ProjectFeature::ENABLED | :anonymous | false
+ :internal | ProjectFeature::PRIVATE | :maintainer | true
+ :internal | ProjectFeature::PRIVATE | :developer | true
+ :internal | ProjectFeature::PRIVATE | :guest | true
+ :internal | ProjectFeature::PRIVATE | :anonymous | false
+ :internal | ProjectFeature::DISABLED | :maintainer | false
+ :internal | ProjectFeature::DISABLED | :developer | false
+ :internal | ProjectFeature::DISABLED | :guest | false
+ :internal | ProjectFeature::DISABLED | :anonymous | false
+ :private | ProjectFeature::ENABLED | :maintainer | true
+ :private | ProjectFeature::ENABLED | :developer | true
+ :private | ProjectFeature::ENABLED | :guest | false
+ :private | ProjectFeature::ENABLED | :anonymous | false
+ :private | ProjectFeature::PRIVATE | :maintainer | true
+ :private | ProjectFeature::PRIVATE | :developer | true
+ :private | ProjectFeature::PRIVATE | :guest | false
+ :private | ProjectFeature::PRIVATE | :anonymous | false
+ :private | ProjectFeature::DISABLED | :maintainer | false
+ :private | ProjectFeature::DISABLED | :developer | false
+ :private | ProjectFeature::DISABLED | :guest | false
+ :private | ProjectFeature::DISABLED | :anonymous | false
+ end
+
+ with_them do
+ let(:current_user) { user_subject(role) }
+ let(:project) { project_subject(project_visibility) }
+
+ it 'allows/disallows the abilities based on the feature flags access level' do
+ project.project_feature.update!(feature_flags_access_level: access_level)
+
+ if allowed
+ expect_allowed(*permissions_abilities(role))
+ else
+ expect_disallowed(*permissions_abilities(role))
+ end
+ end
+ end
+ end
+
+ describe 'Releases feature' do
+ using RSpec::Parameterized::TableSyntax
+
+ let(:guest_permissions) { [:read_release] }
+
+ let(:developer_permissions) do
+ guest_permissions + [:create_release, :update_release, :destroy_release]
+ end
+
+ let(:maintainer_permissions) do
+ developer_permissions
+ end
+
+ where(:project_visibility, :access_level, :role, :allowed) do
+ :public | ProjectFeature::ENABLED | :maintainer | true
+ :public | ProjectFeature::ENABLED | :developer | true
+ :public | ProjectFeature::ENABLED | :guest | true
+ :public | ProjectFeature::ENABLED | :anonymous | true
+ :public | ProjectFeature::PRIVATE | :maintainer | true
+ :public | ProjectFeature::PRIVATE | :developer | true
+ :public | ProjectFeature::PRIVATE | :guest | true
+ :public | ProjectFeature::PRIVATE | :anonymous | false
+ :public | ProjectFeature::DISABLED | :maintainer | false
+ :public | ProjectFeature::DISABLED | :developer | false
+ :public | ProjectFeature::DISABLED | :guest | false
+ :public | ProjectFeature::DISABLED | :anonymous | false
+ :internal | ProjectFeature::ENABLED | :maintainer | true
+ :internal | ProjectFeature::ENABLED | :developer | true
+ :internal | ProjectFeature::ENABLED | :guest | true
+ :internal | ProjectFeature::ENABLED | :anonymous | false
+ :internal | ProjectFeature::PRIVATE | :maintainer | true
+ :internal | ProjectFeature::PRIVATE | :developer | true
+ :internal | ProjectFeature::PRIVATE | :guest | true
+ :internal | ProjectFeature::PRIVATE | :anonymous | false
+ :internal | ProjectFeature::DISABLED | :maintainer | false
+ :internal | ProjectFeature::DISABLED | :developer | false
+ :internal | ProjectFeature::DISABLED | :guest | false
+ :internal | ProjectFeature::DISABLED | :anonymous | false
+ :private | ProjectFeature::ENABLED | :maintainer | true
+ :private | ProjectFeature::ENABLED | :developer | true
+ :private | ProjectFeature::ENABLED | :guest | true
+ :private | ProjectFeature::ENABLED | :anonymous | false
+ :private | ProjectFeature::PRIVATE | :maintainer | true
+ :private | ProjectFeature::PRIVATE | :developer | true
+ :private | ProjectFeature::PRIVATE | :guest | true
+ :private | ProjectFeature::PRIVATE | :anonymous | false
+ :private | ProjectFeature::DISABLED | :maintainer | false
+ :private | ProjectFeature::DISABLED | :developer | false
+ :private | ProjectFeature::DISABLED | :guest | false
+ :private | ProjectFeature::DISABLED | :anonymous | false
+ end
+
+ with_them do
+ let(:current_user) { user_subject(role) }
+ let(:project) { project_subject(project_visibility) }
+
+ it 'allows/disallows the abilities based on the Releases access level' do
+ project.project_feature.update!(releases_access_level: access_level)
+
+ if allowed
+ expect_allowed(*permissions_abilities(role))
+ else
+ expect_disallowed(*permissions_abilities(role))
end
end
end
@@ -2481,4 +2681,39 @@ RSpec.describe ProjectPolicy do
end
end
end
+
+ def project_subject(project_type)
+ case project_type
+ when :public
+ public_project
+ when :internal
+ internal_project
+ else
+ private_project
+ end
+ end
+
+ def user_subject(role)
+ case role
+ when :maintainer
+ maintainer
+ when :developer
+ developer
+ when :guest
+ guest
+ when :anonymous
+ anonymous
+ end
+ end
+
+ def permissions_abilities(role)
+ case role
+ when :maintainer
+ maintainer_permissions
+ when :developer
+ developer_permissions
+ else
+ guest_permissions
+ end
+ end
end
diff --git a/spec/policies/system_hook_policy_spec.rb b/spec/policies/system_hook_policy_spec.rb
new file mode 100644
index 00000000000..37f97a8a3d1
--- /dev/null
+++ b/spec/policies/system_hook_policy_spec.rb
@@ -0,0 +1,29 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe SystemHookPolicy do
+ let(:hook) { create(:system_hook) }
+
+ subject(:policy) { described_class.new(user, hook) }
+
+ context 'when the user is not an admin' do
+ let(:user) { create(:user) }
+
+ %i[read_web_hook destroy_web_hook].each do |thing|
+ it "cannot #{thing}" do
+ expect(policy).to be_disallowed(thing)
+ end
+ end
+ end
+
+ context 'when the user is an admin', :enable_admin_mode do
+ let(:user) { create(:admin) }
+
+ %i[read_web_hook destroy_web_hook].each do |thing|
+ it "can #{thing}" do
+ expect(policy).to be_allowed(thing)
+ end
+ end
+ end
+end
diff --git a/spec/policies/timelog_policy_spec.rb b/spec/policies/timelog_policy_spec.rb
index 97e61cfe5ce..31912c637ce 100644
--- a/spec/policies/timelog_policy_spec.rb
+++ b/spec/policies/timelog_policy_spec.rb
@@ -6,7 +6,7 @@ RSpec.describe TimelogPolicy, models: true do
let_it_be(:author) { create(:user) }
let_it_be(:project) { create(:project, :public) }
let_it_be(:issue) { create(:issue, project: project) }
- let_it_be(:timelog) { create(:timelog, user: author, issue: issue, time_spent: 1800)}
+ let_it_be(:timelog) { create(:timelog, user: author, issue: issue, time_spent: 1800) }
let(:user) { nil }
let(:policy) { described_class.new(user, timelog) }
diff --git a/spec/policies/upload_policy_spec.rb b/spec/policies/upload_policy_spec.rb
new file mode 100644
index 00000000000..1169df0b300
--- /dev/null
+++ b/spec/policies/upload_policy_spec.rb
@@ -0,0 +1,76 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe UploadPolicy do
+ let_it_be(:group) { create(:group) }
+ let_it_be(:project) { create(:project, group: group) }
+ let_it_be(:guest) { create(:user).tap { |user| group.add_guest(user) } }
+ let_it_be(:developer) { create(:user).tap { |user| group.add_developer(user) } }
+ let_it_be(:maintainer) { create(:user).tap { |user| group.add_maintainer(user) } }
+ let_it_be(:owner) { create(:user).tap { |user| group.add_owner(user) } }
+ let_it_be(:admin) { create(:admin) }
+ let_it_be(:non_member_user) { create(:user) }
+
+ let(:upload_permissions) { [:read_upload, :destroy_upload] }
+
+ shared_examples_for 'uploads policy' do
+ subject { described_class.new(current_user, upload) }
+
+ context 'when user is guest' do
+ let(:current_user) { guest }
+
+ it { is_expected.to be_disallowed(*upload_permissions) }
+ end
+
+ context 'when user is developer' do
+ let(:current_user) { developer }
+
+ it { is_expected.to be_disallowed(*upload_permissions) }
+ end
+
+ context 'when user is maintainer' do
+ let(:current_user) { maintainer }
+
+ it { is_expected.to be_allowed(*upload_permissions) }
+ end
+
+ context 'when user is owner' do
+ let(:current_user) { owner }
+
+ it { is_expected.to be_allowed(*upload_permissions) }
+ end
+
+ context 'when user is admin' do
+ let(:current_user) { admin }
+
+ it { is_expected.to be_disallowed(*upload_permissions) }
+
+ context 'with admin mode', :enable_admin_mode do
+ it { is_expected.to be_allowed(*upload_permissions) }
+ end
+ end
+ end
+
+ describe 'destroy_upload' do
+ context 'when deleting project upload' do
+ let_it_be(:upload) { create(:upload, model: project) }
+
+ it_behaves_like 'uploads policy'
+ end
+
+ context 'when deleting group upload' do
+ let_it_be(:upload) { create(:upload, model: group) }
+
+ it_behaves_like 'uploads policy'
+ end
+
+ context 'when deleting upload associated with other model' do
+ let_it_be(:upload) { create(:upload, model: maintainer) }
+
+ subject { described_class.new(maintainer, upload) }
+
+ it { is_expected.to be_disallowed(*upload_permissions) }
+ end
+ end
+end
diff --git a/spec/policies/work_item_policy_spec.rb b/spec/policies/work_item_policy_spec.rb
index f8ec7d9f9bc..ed76ec1eccf 100644
--- a/spec/policies/work_item_policy_spec.rb
+++ b/spec/policies/work_item_policy_spec.rb
@@ -63,6 +63,27 @@ RSpec.describe WorkItemPolicy do
end
end
+ describe 'admin_work_item' do
+ context 'when user is reporter' do
+ let(:current_user) { reporter }
+
+ it { is_expected.to be_allowed(:admin_work_item) }
+ end
+
+ context 'when user is guest' do
+ let(:current_user) { guest }
+
+ it { is_expected.to be_disallowed(:admin_work_item) }
+
+ context 'when guest authored the work item' do
+ let(:work_item_subject) { authored_work_item }
+ let(:current_user) { guest_author }
+
+ it { is_expected.to be_disallowed(:admin_work_item) }
+ end
+ end
+ end
+
describe 'update_work_item' do
context 'when user is reporter' do
let(:current_user) { reporter }
@@ -160,4 +181,24 @@ RSpec.describe WorkItemPolicy do
end
end
end
+
+ describe 'set_work_item_metadata' do
+ context 'when user is reporter' do
+ let(:current_user) { reporter }
+
+ it { is_expected.to be_allowed(:set_work_item_metadata) }
+ end
+
+ context 'when user is guest' do
+ let(:current_user) { guest }
+
+ it { is_expected.to be_disallowed(:set_work_item_metadata) }
+
+ context 'when the work item is not persisted yet' do
+ let(:work_item_subject) { build(:work_item, project: project) }
+
+ it { is_expected.to be_allowed(:set_work_item_metadata) }
+ end
+ end
+ end
end
diff --git a/spec/presenters/alert_management/alert_presenter_spec.rb b/spec/presenters/alert_management/alert_presenter_spec.rb
index 21c0cb3fead..fe228f174fe 100644
--- a/spec/presenters/alert_management/alert_presenter_spec.rb
+++ b/spec/presenters/alert_management/alert_presenter_spec.rb
@@ -115,7 +115,7 @@ RSpec.describe AlertManagement::AlertPresenter do
it 'formats the start time of the alert' do
alert.started_at = Time.utc(2019, 5, 5)
- expect(presenter.start_time). to eq('05 May 2019, 12:00AM (UTC)')
+ expect(presenter.start_time).to eq('05 May 2019, 12:00AM (UTC)')
end
end
diff --git a/spec/presenters/ci/build_runner_presenter_spec.rb b/spec/presenters/ci/build_runner_presenter_spec.rb
index ace65307321..fe2d8f0f670 100644
--- a/spec/presenters/ci/build_runner_presenter_spec.rb
+++ b/spec/presenters/ci/build_runner_presenter_spec.rb
@@ -309,25 +309,64 @@ RSpec.describe Ci::BuildRunnerPresenter do
end
describe '#runner_variables' do
- subject { presenter.runner_variables }
+ subject(:runner_variables) { presenter.runner_variables }
let_it_be(:project) { create(:project, :repository) }
- shared_examples 'returns an array with the expected variables' do
- it 'returns an array' do
- is_expected.to be_an_instance_of(Array)
+ let(:sha) { project.repository.commit.sha }
+ let(:pipeline) { create(:ci_pipeline, sha: sha, project: project) }
+ let(:build) { create(:ci_build, pipeline: pipeline) }
+
+ it 'returns an array' do
+ is_expected.to be_an_instance_of(Array)
+ end
+
+ it 'returns the expected variables' do
+ is_expected.to eq(presenter.variables.to_runner_variables)
+ end
+
+ context 'when there are variables to expand' do
+ before_all do
+ create(:ci_variable, project: project,
+ key: 'regular_var',
+ value: 'value 1')
+ create(:ci_variable, project: project,
+ key: 'file_var',
+ value: 'value 2',
+ variable_type: :file)
+ create(:ci_variable, project: project,
+ key: 'var_with_variables',
+ value: 'value 3 and $regular_var and $file_var and $undefined_var')
end
- it 'returns the expected variables' do
- is_expected.to eq(presenter.variables.to_runner_variables)
+ it 'returns variables with expanded values' do
+ expect(runner_variables).to include(
+ { key: 'regular_var', value: 'value 1',
+ public: false, masked: false },
+ { key: 'file_var', value: 'value 2',
+ public: false, masked: false, file: true },
+ { key: 'var_with_variables', value: 'value 3 and value 1 and $file_var and $undefined_var',
+ public: false, masked: false }
+ )
end
- end
- let(:sha) { project.repository.commit.sha }
- let(:pipeline) { create(:ci_pipeline, sha: sha, project: project) }
- let(:build) { create(:ci_build, pipeline: pipeline) }
+ context 'when the FF ci_stop_expanding_file_vars_for_runners is disabled' do
+ before do
+ stub_feature_flags(ci_stop_expanding_file_vars_for_runners: false)
+ end
- it_behaves_like 'returns an array with the expected variables'
+ it 'returns variables with expanded values' do
+ expect(runner_variables).to include(
+ { key: 'regular_var', value: 'value 1',
+ public: false, masked: false },
+ { key: 'file_var', value: 'value 2',
+ public: false, masked: false, file: true },
+ { key: 'var_with_variables', value: 'value 3 and value 1 and value 2 and $undefined_var',
+ public: false, masked: false }
+ )
+ end
+ end
+ end
end
describe '#runner_variables subset' do
diff --git a/spec/presenters/merge_request_presenter_spec.rb b/spec/presenters/merge_request_presenter_spec.rb
index 798bee70e42..31aa4778d3c 100644
--- a/spec/presenters/merge_request_presenter_spec.rb
+++ b/spec/presenters/merge_request_presenter_spec.rb
@@ -17,29 +17,8 @@ RSpec.describe MergeRequestPresenter do
allow(resource).to receive(:mergeable_discussions_state?).and_return(discussions_state)
end
- context 'when change_response_code_merge_status is enabled' do
- it 'returns the mergeable_discussions_state' do
- is_expected.to eq(discussions_state)
- end
- end
-
- context 'when change_response_code_merge_status is disabled' do
- before do
- stub_feature_flags(change_response_code_merge_status: false)
- end
-
- context 'when it is not mergeable' do
- it 'returns false' do
- resource.close!
- is_expected.to eq(false)
- end
- end
-
- context 'when it is mergeable' do
- it 'returns the mergeable_discussions_state' do
- is_expected.to eq(discussions_state)
- end
- end
+ it 'returns the mergeable_discussions_state' do
+ is_expected.to eq(discussions_state)
end
end
diff --git a/spec/presenters/packages/pypi/simple_package_versions_presenter_spec.rb b/spec/presenters/packages/pypi/simple_package_versions_presenter_spec.rb
index be454e5168c..c966b1fc8e1 100644
--- a/spec/presenters/packages/pypi/simple_package_versions_presenter_spec.rb
+++ b/spec/presenters/packages/pypi/simple_package_versions_presenter_spec.rb
@@ -59,7 +59,7 @@ RSpec.describe ::Packages::Pypi::SimplePackageVersionsPresenter, :aggregate_fail
let(:project_or_group) { project }
- it { is_expected.not_to include(package_file_pending_destruction.file_name)}
+ it { is_expected.not_to include(package_file_pending_destruction.file_name) }
end
end
end
diff --git a/spec/presenters/project_hook_presenter_spec.rb b/spec/presenters/project_hook_presenter_spec.rb
index 2e4bd17bbe1..a85865652d8 100644
--- a/spec/presenters/project_hook_presenter_spec.rb
+++ b/spec/presenters/project_hook_presenter_spec.rb
@@ -18,10 +18,10 @@ RSpec.describe ProjectHookPresenter do
end
describe '#logs_retry_path' do
- subject { web_hook.present.logs_details_path(web_hook_log) }
+ subject { web_hook.present.logs_retry_path(web_hook_log) }
let(:expected_path) do
- "/#{project.namespace.path}/#{project.name}/-/hooks/#{web_hook.id}/hook_logs/#{web_hook_log.id}"
+ "/#{project.namespace.path}/#{project.name}/-/hooks/#{web_hook.id}/hook_logs/#{web_hook_log.id}/retry"
end
it { is_expected.to eq(expected_path) }
diff --git a/spec/presenters/project_member_presenter_spec.rb b/spec/presenters/project_member_presenter_spec.rb
index ad45a23c183..1cfc8cfb53b 100644
--- a/spec/presenters/project_member_presenter_spec.rb
+++ b/spec/presenters/project_member_presenter_spec.rb
@@ -55,39 +55,95 @@ RSpec.describe ProjectMemberPresenter do
end
describe '#can_update?' do
- context 'when user can update_project_member' do
+ context 'when user is NOT attempting to update an Owner' do
before do
- allow(presenter).to receive(:can?).with(user, :update_project_member, presenter).and_return(true)
+ allow(project_member).to receive(:owner?).and_return(false)
end
- it { expect(presenter.can_update?).to eq(true) }
+ context 'when user can update_project_member' do
+ before do
+ allow(presenter).to receive(:can?).with(user, :update_project_member, presenter).and_return(true)
+ end
+
+ specify { expect(presenter.can_update?).to eq(true) }
+ end
+
+ context 'when user cannot update_project_member' do
+ before do
+ allow(presenter).to receive(:can?).with(user, :update_project_member, presenter).and_return(false)
+ allow(presenter).to receive(:can?).with(user, :override_project_member, presenter).and_return(false)
+ end
+
+ specify { expect(presenter.can_update?).to eq(false) }
+ end
end
- context 'when user cannot update_project_member' do
+ context 'when user is attempting to update an Owner' do
before do
- allow(presenter).to receive(:can?).with(user, :update_project_member, presenter).and_return(false)
- allow(presenter).to receive(:can?).with(user, :override_project_member, presenter).and_return(false)
+ allow(project_member).to receive(:owner?).and_return(true)
+ end
+
+ context 'when user can manage owners' do
+ before do
+ allow(presenter).to receive(:can?).with(user, :manage_owners, project).and_return(true)
+ end
+
+ specify { expect(presenter.can_update?).to eq(true) }
end
- it { expect(presenter.can_update?).to eq(false) }
+ context 'when user cannot manage owners' do
+ before do
+ allow(presenter).to receive(:can?).with(user, :manage_owners, project).and_return(false)
+ end
+
+ specify { expect(presenter.can_update?).to eq(false) }
+ end
end
end
describe '#can_remove?' do
- context 'when user can destroy_project_member' do
+ context 'when user is NOT attempting to remove an Owner' do
before do
- allow(presenter).to receive(:can?).with(user, :destroy_project_member, presenter).and_return(true)
+ allow(project_member).to receive(:owner?).and_return(false)
end
- it { expect(presenter.can_remove?).to eq(true) }
+ context 'when user can destroy_project_member' do
+ before do
+ allow(presenter).to receive(:can?).with(user, :destroy_project_member, presenter).and_return(true)
+ end
+
+ specify { expect(presenter.can_remove?).to eq(true) }
+ end
+
+ context 'when user cannot destroy_project_member' do
+ before do
+ allow(presenter).to receive(:can?).with(user, :destroy_project_member, presenter).and_return(false)
+ end
+
+ specify { expect(presenter.can_remove?).to eq(false) }
+ end
end
- context 'when user cannot destroy_project_member' do
+ context 'when user is attempting to remove an Owner' do
before do
- allow(presenter).to receive(:can?).with(user, :destroy_project_member, presenter).and_return(false)
+ allow(project_member).to receive(:owner?).and_return(true)
+ end
+
+ context 'when user can manage owners' do
+ before do
+ allow(presenter).to receive(:can?).with(user, :manage_owners, project).and_return(true)
+ end
+
+ specify { expect(presenter.can_remove?).to eq(true) }
end
- it { expect(presenter.can_remove?).to eq(false) }
+ context 'when user cannot manage owners' do
+ before do
+ allow(presenter).to receive(:can?).with(user, :manage_owners, project).and_return(false)
+ end
+
+ specify { expect(presenter.can_remove?).to eq(false) }
+ end
end
end
@@ -99,7 +155,7 @@ RSpec.describe ProjectMemberPresenter do
context 'and user can update_project_member' do
before do
- allow(presenter).to receive(:can?).with(user, :update_project_member, presenter).and_return(true)
+ allow(presenter).to receive(:can_update?).and_return(true)
end
it { expect(presenter.can_approve?).to eq(true) }
@@ -107,8 +163,7 @@ RSpec.describe ProjectMemberPresenter do
context 'and user cannot update_project_member' do
before do
- allow(presenter).to receive(:can?).with(user, :update_project_member, presenter).and_return(false)
- allow(presenter).to receive(:can?).with(user, :override_project_member, presenter).and_return(false)
+ allow(presenter).to receive(:can_update?).and_return(false)
end
it { expect(presenter.can_approve?).to eq(false) }
@@ -122,7 +177,7 @@ RSpec.describe ProjectMemberPresenter do
context 'and user can update_project_member' do
before do
- allow(presenter).to receive(:can?).with(user, :update_project_member, presenter).and_return(true)
+ allow(presenter).to receive(:can_update?).and_return(true)
end
it { expect(presenter.can_approve?).to eq(false) }
@@ -130,7 +185,7 @@ RSpec.describe ProjectMemberPresenter do
context 'and user cannot update_project_member' do
before do
- allow(presenter).to receive(:can?).with(user, :update_project_member, presenter).and_return(false)
+ allow(presenter).to receive(:can_update?).and_return(false)
end
it { expect(presenter.can_approve?).to eq(false) }
@@ -138,9 +193,32 @@ RSpec.describe ProjectMemberPresenter do
end
end
- it_behaves_like '#valid_level_roles', :project do
+ describe 'valid level roles' do
before do
- entity.group = group
+ allow(Ability).to receive(:allowed?).and_call_original
+ allow(Ability).to receive(:allowed?).with(member_user, :manage_owners, entity).and_return(can_manage_owners)
+ end
+
+ context 'when user cannot manage owners' do
+ it_behaves_like '#valid_level_roles', :project do
+ let(:expected_roles) { { 'Developer' => 30, 'Maintainer' => 40, 'Reporter' => 20 } }
+ let(:can_manage_owners) { false }
+
+ before do
+ entity.group = group
+ end
+ end
+ end
+
+ context 'when user can manage owners' do
+ it_behaves_like '#valid_level_roles', :project do
+ let(:expected_roles) { { 'Developer' => 30, 'Maintainer' => 40, 'Owner' => 50, 'Reporter' => 20 } }
+ let(:can_manage_owners) { true }
+
+ before do
+ entity.group = group
+ end
+ end
end
end
end
diff --git a/spec/requests/admin/broadcast_messages_controller_spec.rb b/spec/requests/admin/broadcast_messages_controller_spec.rb
new file mode 100644
index 00000000000..9101370d42d
--- /dev/null
+++ b/spec/requests/admin/broadcast_messages_controller_spec.rb
@@ -0,0 +1,18 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Admin::BroadcastMessagesController, :enable_admin_mode do
+ before do
+ sign_in(create(:admin))
+ end
+
+ describe 'POST /preview' do
+ it 'renders preview partial' do
+ post preview_admin_broadcast_messages_path, params: { broadcast_message: { message: "Hello, world!" } }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response.body).to render_template(:_preview)
+ end
+ end
+end
diff --git a/spec/requests/admin/integrations_controller_spec.rb b/spec/requests/admin/integrations_controller_spec.rb
index cfb40063095..128aada0975 100644
--- a/spec/requests/admin/integrations_controller_spec.rb
+++ b/spec/requests/admin/integrations_controller_spec.rb
@@ -38,6 +38,7 @@ RSpec.describe Admin::IntegrationsController, :enable_admin_mode do
expect(response).to include_pagination_headers
expect(json_response).to contain_exactly(
{
+ 'id' => project.id,
'avatar_url' => project.avatar_url,
'full_name' => project.full_name,
'name' => project.name,
diff --git a/spec/requests/api/api_spec.rb b/spec/requests/api/api_spec.rb
index b6cb790bb71..260f7cbc226 100644
--- a/spec/requests/api/api_spec.rb
+++ b/spec/requests/api/api_spec.rb
@@ -262,4 +262,54 @@ RSpec.describe API::API do
end
end
end
+
+ describe 'content security policy header' do
+ let_it_be(:user) { create(:user) }
+
+ let(:csp) { nil }
+ let(:report_only) { false }
+
+ subject { get api("/users/#{user.id}", user) }
+
+ before do
+ allow(Rails.application.config).to receive(:content_security_policy).and_return(csp)
+ allow(Rails.application.config).to receive(:content_security_policy_report_only).and_return(report_only)
+ end
+
+ context 'when CSP is not configured globally' do
+ it 'does not set the CSP header' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response.headers['Content-Security-Policy']).to be_nil
+ end
+ end
+
+ context 'when CSP is configured globally' do
+ let(:csp) do
+ ActionDispatch::ContentSecurityPolicy.new do |p|
+ p.default_src :self
+ end
+ end
+
+ it 'sets a stricter CSP header' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response.headers['Content-Security-Policy']).to eq("default-src 'none'")
+ end
+
+ context 'when report_only is true' do
+ let(:report_only) { true }
+
+ it 'does not set any CSP header' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response.headers['Content-Security-Policy']).to be_nil
+ expect(response.headers['Content-Security-Policy-Report-Only']).to be_nil
+ end
+ end
+ end
+ end
end
diff --git a/spec/requests/api/boards_spec.rb b/spec/requests/api/boards_spec.rb
index ca6492396cd..817e1324c7c 100644
--- a/spec/requests/api/boards_spec.rb
+++ b/spec/requests/api/boards_spec.rb
@@ -57,9 +57,11 @@ RSpec.describe API::Boards do
let(:url) { "/projects/#{board_parent.id}/boards/#{board.id}" }
it 'delete the issue board' do
- delete api(url, user)
+ expect do
+ delete api(url, user)
- expect(response).to have_gitlab_http_status(:no_content)
+ expect(response).to have_gitlab_http_status(:no_content)
+ end.to change { board_parent.boards.count }.by(-1)
end
end
diff --git a/spec/requests/api/branches_spec.rb b/spec/requests/api/branches_spec.rb
index 780e45cf443..cc696d76a02 100644
--- a/spec/requests/api/branches_spec.rb
+++ b/spec/requests/api/branches_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe API::Branches do
let_it_be(:user) { create(:user) }
- let(:project) { create(:project, :repository, creator: user, path: 'my.project') }
+ let(:project) { create(:project, :repository, creator: user, path: 'my.project', create_branch: 'ends-with.txt') }
let(:guest) { create(:user).tap { |u| project.add_guest(u) } }
let(:branch_name) { 'feature' }
let(:branch_sha) { '0b4bc9a49b562e85de7cc9e834518ea6828729b9' }
@@ -17,7 +17,6 @@ RSpec.describe API::Branches do
before do
project.add_maintainer(user)
- project.repository.add_branch(user, 'ends-with.txt', branch_sha)
stub_feature_flags(branch_list_keyset_pagination: false)
end
@@ -201,7 +200,7 @@ RSpec.describe API::Branches do
context 'when sort value is not supported' do
it_behaves_like '400 response' do
- let(:request) { get api(route, user), params: { sort: 'unknown' }}
+ let(:request) { get api(route, user), params: { sort: 'unknown' } }
end
end
end
diff --git a/spec/requests/api/bulk_imports_spec.rb b/spec/requests/api/bulk_imports_spec.rb
index 9f9907f4f00..6a3d13567bd 100644
--- a/spec/requests/api/bulk_imports_spec.rb
+++ b/spec/requests/api/bulk_imports_spec.rb
@@ -53,23 +53,80 @@ RSpec.describe API::BulkImports do
end
end
- it 'starts a new migration' do
- post api('/bulk_imports', user), params: {
- configuration: {
- url: 'http://gitlab.example',
- access_token: 'access_token'
- },
- entities: [
- source_type: 'group_entity',
- source_full_path: 'full_path',
- destination_name: 'destination_slug',
- destination_namespace: 'destination_namespace'
- ]
- }
-
- expect(response).to have_gitlab_http_status(:created)
-
- expect(json_response['status']).to eq('created')
+ shared_examples 'starting a new migration' do
+ it 'starts a new migration' do
+ post api('/bulk_imports', user), params: {
+ configuration: {
+ url: 'http://gitlab.example',
+ access_token: 'access_token'
+ },
+ entities: [
+ {
+ source_type: 'group_entity',
+ source_full_path: 'full_path',
+ destination_namespace: 'destination_namespace'
+ }.merge(destination_param)
+ ]
+ }
+
+ expect(response).to have_gitlab_http_status(:created)
+
+ expect(json_response['status']).to eq('created')
+ end
+ end
+
+ include_examples 'starting a new migration' do
+ let(:destination_param) { { destination_slug: 'destination_slug' } }
+ end
+
+ include_examples 'starting a new migration' do
+ let(:destination_param) { { destination_name: 'destination_name' } }
+ end
+
+ context 'when both destination_name & destination_slug are provided' do
+ it 'returns a mutually exclusive error' do
+ post api('/bulk_imports', user), params: {
+ configuration: {
+ url: 'http://gitlab.example',
+ access_token: 'access_token'
+ },
+ entities: [
+ {
+ source_type: 'group_entity',
+ source_full_path: 'full_path',
+ destination_name: 'destination_name',
+ destination_slug: 'destination_slug',
+ destination_namespace: 'destination_namespace'
+ }
+ ]
+ }
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+
+ expect(json_response['error']).to eq('entities[0][destination_slug], entities[0][destination_name] are mutually exclusive')
+ end
+ end
+
+ context 'when neither destination_name nor destination_slug is provided' do
+ it 'returns at_least_one_of error' do
+ post api('/bulk_imports', user), params: {
+ configuration: {
+ url: 'http://gitlab.example',
+ access_token: 'access_token'
+ },
+ entities: [
+ {
+ source_type: 'group_entity',
+ source_full_path: 'full_path',
+ destination_namespace: 'destination_namespace'
+ }
+ ]
+ }
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+
+ expect(json_response['error']).to eq('entities[0][destination_slug], entities[0][destination_name] are missing, at least one parameter must be provided')
+ end
end
context 'when provided url is blocked' do
@@ -82,7 +139,7 @@ RSpec.describe API::BulkImports do
entities: [
source_type: 'group_entity',
source_full_path: 'full_path',
- destination_name: 'destination_slug',
+ destination_slug: 'destination_slug',
destination_namespace: 'destination_namespace'
]
}
diff --git a/spec/requests/api/ci/jobs_spec.rb b/spec/requests/api/ci/jobs_spec.rb
index 84ef9f8db1b..57828e50320 100644
--- a/spec/requests/api/ci/jobs_spec.rb
+++ b/spec/requests/api/ci/jobs_spec.rb
@@ -158,7 +158,7 @@ RSpec.describe API::Ci::Jobs do
context 'with basic auth header' do
let(:personal_access_token) { create(:personal_access_token, user: user) }
- let(:token) { personal_access_token.token}
+ let(:token) { personal_access_token.token }
include_context 'with auth headers' do
let(:header) { { Gitlab::Auth::AuthFinders::PRIVATE_TOKEN_HEADER => token } }
diff --git a/spec/requests/api/ci/pipeline_schedules_spec.rb b/spec/requests/api/ci/pipeline_schedules_spec.rb
index 5fb94976c5f..30badadde13 100644
--- a/spec/requests/api/ci/pipeline_schedules_spec.rb
+++ b/spec/requests/api/ci/pipeline_schedules_spec.rb
@@ -98,7 +98,7 @@ RSpec.describe API::Ci::PipelineSchedules do
end
matcher :return_pipeline_schedule_sucessfully do
- match_unless_raises do |reponse|
+ match_unless_raises do |response|
expect(response).to have_gitlab_http_status(:ok)
expect(response).to match_response_schema('pipeline_schedule')
end
@@ -207,6 +207,110 @@ RSpec.describe API::Ci::PipelineSchedules do
end
end
+ describe 'GET /projects/:id/pipeline_schedules/:pipeline_schedule_id/pipelines' do
+ let(:pipeline_schedule) { create(:ci_pipeline_schedule, project: project, owner: developer) }
+
+ before do
+ create_list(:ci_pipeline, 2, project: project, pipeline_schedule: pipeline_schedule, source: :schedule)
+ end
+
+ let(:url) { "/projects/#{project.id}/pipeline_schedules/#{pipeline_schedule.id}/pipelines" }
+
+ matcher :return_pipeline_schedule_pipelines_successfully do
+ match_unless_raises do |response|
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to include_pagination_headers
+ expect(response).to match_response_schema('public_api/v4/pipelines')
+ end
+ end
+
+ shared_examples 'request with project permissions' do
+ context 'authenticated user with project permissions' do
+ before do
+ project.add_maintainer(user)
+ end
+
+ it 'returns the details of pipelines triggered from the pipeline schedule' do
+ get api(url, user)
+
+ expect(response).to return_pipeline_schedule_pipelines_successfully
+ end
+ end
+ end
+
+ shared_examples 'request with schedule ownership' do
+ context 'authenticated user with pipeline schedule ownership' do
+ it 'returns the details of pipelines triggered from the pipeline schedule' do
+ get api(url, developer)
+
+ expect(response).to return_pipeline_schedule_pipelines_successfully
+ end
+ end
+ end
+
+ shared_examples 'request with unauthenticated user' do
+ context 'with unauthenticated user' do
+ it 'does not return the details of pipelines triggered from the pipeline schedule' do
+ get api(url)
+
+ expect(response).to have_gitlab_http_status(:unauthorized)
+ end
+ end
+ end
+
+ shared_examples 'request with non-existing pipeline_schedule' do
+ it "responds with 404 Not Found if requesting for a non-existing pipeline schedule's pipelines" do
+ get api("/projects/#{project.id}/pipeline_schedules/#{non_existing_record_id}/pipelines", developer)
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+
+ context 'with private project' do
+ it_behaves_like 'request with schedule ownership'
+ it_behaves_like 'request with project permissions'
+ it_behaves_like 'request with unauthenticated user'
+ it_behaves_like 'request with non-existing pipeline_schedule'
+
+ context 'authenticated user with no project permissions' do
+ it 'does not return the details of pipelines triggered from the pipeline schedule' do
+ get api(url, user)
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+
+ context 'authenticated user with insufficient project permissions' do
+ before do
+ project.add_guest(user)
+ end
+
+ it 'does not return the details of pipelines triggered from the pipeline schedule' do
+ get api(url, user)
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+ end
+
+ context 'with public project' do
+ let_it_be(:project) { create(:project, :repository, :public, public_builds: false) }
+
+ it_behaves_like 'request with schedule ownership'
+ it_behaves_like 'request with project permissions'
+ it_behaves_like 'request with unauthenticated user'
+ it_behaves_like 'request with non-existing pipeline_schedule'
+
+ context 'authenticated user with no project permissions' do
+ it 'returns the details of pipelines triggered from the pipeline schedule' do
+ get api(url, user)
+
+ expect(response).to return_pipeline_schedule_pipelines_successfully
+ end
+ end
+ end
+ end
+
describe 'POST /projects/:id/pipeline_schedules' do
let(:params) { attributes_for(:ci_pipeline_schedule) }
diff --git a/spec/requests/api/ci/runner/jobs_request_post_spec.rb b/spec/requests/api/ci/runner/jobs_request_post_spec.rb
index 746be1ccc44..cd58251cfcc 100644
--- a/spec/requests/api/ci/runner/jobs_request_post_spec.rb
+++ b/spec/requests/api/ci/runner/jobs_request_post_spec.rb
@@ -29,7 +29,7 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state do
describe 'POST /api/v4/jobs/request' do
let!(:last_update) {}
- let!(:new_update) { }
+ let!(:new_update) {}
let(:user_agent) { 'gitlab-runner 9.0.0 (9-0-stable; go1.7.4; linux/amd64)' }
before do
diff --git a/spec/requests/api/ci/runner/jobs_trace_spec.rb b/spec/requests/api/ci/runner/jobs_trace_spec.rb
index c3c074d80d9..d42043a7fe5 100644
--- a/spec/requests/api/ci/runner/jobs_trace_spec.rb
+++ b/spec/requests/api/ci/runner/jobs_trace_spec.rb
@@ -61,7 +61,7 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_trace_chunks do
end
context 'when job has been updated recently' do
- it { expect { patch_the_trace }.not_to change { job.updated_at }}
+ it { expect { patch_the_trace }.not_to change { job.updated_at } }
it "changes the job's trace" do
patch_the_trace
@@ -70,7 +70,7 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_trace_chunks do
end
context 'when Runner makes a force-patch' do
- it { expect { force_patch_the_trace }.not_to change { job.updated_at }}
+ it { expect { force_patch_the_trace }.not_to change { job.updated_at } }
it "doesn't change the build.trace" do
force_patch_the_trace
diff --git a/spec/requests/api/ci/runner/runners_post_spec.rb b/spec/requests/api/ci/runner/runners_post_spec.rb
index 50ace7adccb..47302046865 100644
--- a/spec/requests/api/ci/runner/runners_post_spec.rb
+++ b/spec/requests/api/ci/runner/runners_post_spec.rb
@@ -16,7 +16,8 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state do
context 'when invalid token is provided' do
it 'returns 403 error' do
allow_next_instance_of(::Ci::Runners::RegisterRunnerService) do |service|
- allow(service).to receive(:execute).and_return(nil)
+ allow(service).to receive(:execute)
+ .and_return(ServiceResponse.error(message: 'invalid token supplied', http_status: :forbidden))
end
post api('/runners'), params: { token: 'invalid' }
@@ -58,7 +59,7 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state do
expect(service).to receive(:execute)
.once
.with('valid token', a_hash_including(expected_params))
- .and_return(new_runner)
+ .and_return(ServiceResponse.success(payload: { runner: new_runner }))
end
end
@@ -113,7 +114,7 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state do
.once
.with('valid token', a_hash_including('maintenance_note' => 'Some maintainer notes')
.and(excluding('maintainter_note' => anything)))
- .and_return(new_runner)
+ .and_return(ServiceResponse.success(payload: { runner: new_runner }))
end
request
@@ -139,7 +140,7 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state do
expect(service).to receive(:execute)
.once
.with('valid token', a_hash_including(expected_params))
- .and_return(new_runner)
+ .and_return(ServiceResponse.success(payload: { runner: new_runner }))
end
request
diff --git a/spec/requests/api/ci/secure_files_spec.rb b/spec/requests/api/ci/secure_files_spec.rb
index 6f16fe5460b..f1f22dfadc2 100644
--- a/spec/requests/api/ci/secure_files_spec.rb
+++ b/spec/requests/api/ci/secure_files_spec.rb
@@ -59,7 +59,7 @@ RSpec.describe API::Ci::SecureFiles do
expect do
post api("/projects/#{project.id}/secure_files", maintainer), params: file_params
- end.not_to change {project.secure_files.count}
+ end.not_to change { project.secure_files.count }
expect(response).to have_gitlab_http_status(:service_unavailable)
end
@@ -78,7 +78,7 @@ RSpec.describe API::Ci::SecureFiles do
it 'returns a 201 when uploading a file when the ci_secure_files_read_only feature flag is disabled' do
expect do
post api("/projects/#{project.id}/secure_files", maintainer), params: file_params
- end.to change {project.secure_files.count}.by(1)
+ end.to change { project.secure_files.count }.by(1)
expect(response).to have_gitlab_http_status(:created)
end
@@ -249,7 +249,7 @@ RSpec.describe API::Ci::SecureFiles do
it 'creates a secure file' do
expect do
post api("/projects/#{project.id}/secure_files", maintainer), params: file_params
- end.to change {project.secure_files.count}.by(1)
+ end.to change { project.secure_files.count }.by(1)
expect(response).to have_gitlab_http_status(:created)
expect(json_response['name']).to eq('upload-keystore.jks')
diff --git a/spec/requests/api/ci/triggers_spec.rb b/spec/requests/api/ci/triggers_spec.rb
index a036a55f5f3..953dcb8a483 100644
--- a/spec/requests/api/ci/triggers_spec.rb
+++ b/spec/requests/api/ci/triggers_spec.rb
@@ -136,8 +136,8 @@ RSpec.describe API::Ci::Triggers do
end
context 'when triggered from another running job' do
- let!(:trigger) { }
- let!(:trigger_request) { }
+ let!(:trigger) {}
+ let!(:trigger_request) {}
context 'when other job is triggered by a user' do
let(:trigger_token) { create(:ci_build, :running, project: project, user: user).token }
@@ -242,7 +242,7 @@ RSpec.describe API::Ci::Triggers do
expect do
post api("/projects/#{project.id}/triggers", user),
params: { description: 'trigger' }
- end.to change {project.triggers.count}.by(1)
+ end.to change { project.triggers.count }.by(1)
expect(response).to have_gitlab_http_status(:created)
expect(json_response).to include('description' => 'trigger')
@@ -335,7 +335,7 @@ RSpec.describe API::Ci::Triggers do
delete api("/projects/#{project.id}/triggers/#{trigger.id}", user)
expect(response).to have_gitlab_http_status(:no_content)
- end.to change {project.triggers.count}.by(-1)
+ end.to change { project.triggers.count }.by(-1)
end
it 'responds with 404 Not Found if requesting non-existing trigger' do
diff --git a/spec/requests/api/ci/variables_spec.rb b/spec/requests/api/ci/variables_spec.rb
index dc524e121d5..74ed8c1551d 100644
--- a/spec/requests/api/ci/variables_spec.rb
+++ b/spec/requests/api/ci/variables_spec.rb
@@ -116,7 +116,7 @@ RSpec.describe API::Ci::Variables do
it 'creates variable' do
expect do
post api("/projects/#{project.id}/variables", user), params: { key: 'TEST_VARIABLE_2', value: 'PROTECTED_VALUE_2', protected: true, masked: true }
- end.to change {project.variables.count}.by(1)
+ end.to change { project.variables.count }.by(1)
expect(response).to have_gitlab_http_status(:created)
expect(json_response['key']).to eq('TEST_VARIABLE_2')
@@ -129,7 +129,7 @@ RSpec.describe API::Ci::Variables do
it 'creates variable with optional attributes' do
expect do
post api("/projects/#{project.id}/variables", user), params: { variable_type: 'file', key: 'TEST_VARIABLE_2', value: 'VALUE_2' }
- end.to change {project.variables.count}.by(1)
+ end.to change { project.variables.count }.by(1)
expect(response).to have_gitlab_http_status(:created)
expect(json_response['key']).to eq('TEST_VARIABLE_2')
@@ -142,7 +142,7 @@ RSpec.describe API::Ci::Variables do
it 'does not allow to duplicate variable key' do
expect do
post api("/projects/#{project.id}/variables", user), params: { key: variable.key, value: 'VALUE_2' }
- end.to change {project.variables.count}.by(0)
+ end.to change { project.variables.count }.by(0)
expect(response).to have_gitlab_http_status(:bad_request)
end
@@ -268,7 +268,7 @@ RSpec.describe API::Ci::Variables do
delete api("/projects/#{project.id}/variables/#{variable.key}", user)
expect(response).to have_gitlab_http_status(:no_content)
- end.to change {project.variables.count}.by(-1)
+ end.to change { project.variables.count }.by(-1)
end
it 'responds with 404 Not Found if requesting non-existing variable' do
@@ -295,7 +295,7 @@ RSpec.describe API::Ci::Variables do
delete api("/projects/#{project.id}/variables/key1", user), params: { 'filter[environment_scope]': 'production' }
expect(response).to have_gitlab_http_status(:no_content)
- end.to change {project.variables.count}.by(-1)
+ end.to change { project.variables.count }.by(-1)
expect(var1.reload).to be_present
expect { var2.reload }.to raise_error(ActiveRecord::RecordNotFound)
diff --git a/spec/requests/api/clusters/agents_spec.rb b/spec/requests/api/clusters/agents_spec.rb
index 72d4266b9e3..5e3bdd69529 100644
--- a/spec/requests/api/clusters/agents_spec.rb
+++ b/spec/requests/api/clusters/agents_spec.rb
@@ -101,7 +101,7 @@ RSpec.describe API::Clusters::Agents do
expect do
post(api("/projects/#{project.id}/cluster_agents", user),
params: { name: 'some-agent' })
- end.to change {project.cluster_agents.count}.by(1)
+ end.to change { project.cluster_agents.count }.by(1)
aggregate_failures "testing response" do
expect(response).to have_gitlab_http_status(:created)
@@ -139,7 +139,7 @@ RSpec.describe API::Clusters::Agents do
delete api("/projects/#{project.id}/cluster_agents/#{agent.id}", user)
expect(response).to have_gitlab_http_status(:no_content)
- end.to change {project.cluster_agents.count}.by(-1)
+ end.to change { project.cluster_agents.count }.by(-1)
end
it 'returns a 404 error when deleting non existent agent' do
diff --git a/spec/requests/api/commits_spec.rb b/spec/requests/api/commits_spec.rb
index 9ef845f06bf..68fe45cd026 100644
--- a/spec/requests/api/commits_spec.rb
+++ b/spec/requests/api/commits_spec.rb
@@ -206,11 +206,13 @@ RSpec.describe API::Commits do
let(:page) { 1 }
let(:per_page) { 5 }
let(:ref_name) { 'master' }
- let!(:request) do
+ let(:request) do
get api("/projects/#{project_id}/repository/commits?page=#{page}&per_page=#{per_page}&ref_name=#{ref_name}", user)
end
it 'returns correct headers' do
+ request
+
expect(response).to include_limited_pagination_headers
expect(response.headers['Link']).to match(/page=1&per_page=5/)
expect(response.headers['Link']).to match(/page=2&per_page=5/)
@@ -218,6 +220,8 @@ RSpec.describe API::Commits do
context 'viewing the first page' do
it 'returns the first 5 commits' do
+ request
+
commit = project.repository.commit
expect(json_response.size).to eq(per_page)
@@ -230,6 +234,8 @@ RSpec.describe API::Commits do
let(:page) { 3 }
it 'returns the third 5 commits' do
+ request
+
commit = project.repository.commits('HEAD', limit: per_page, offset: (page - 1) * per_page).first
expect(json_response.size).to eq(per_page)
@@ -238,10 +244,55 @@ RSpec.describe API::Commits do
end
end
- context 'when per_page is 0' do
- let(:per_page) { 0 }
+ context 'when pagination params are invalid' do
+ let_it_be(:project) { create(:project, :repository) }
+
+ using RSpec::Parameterized::TableSyntax
+
+ where(:page, :per_page, :error_message) do
+ 0 | nil | 'page does not have a valid value'
+ -1 | nil | 'page does not have a valid value'
+ 'a' | nil | 'page is invalid'
+ nil | 0 | 'per_page does not have a valid value'
+ nil | -1 | 'per_page does not have a valid value'
+ nil | 'a' | 'per_page is invalid'
+ end
+
+ with_them do
+ it 'returns 400 response' do
+ request
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(json_response['error']).to eq(error_message)
+ end
+ end
- it_behaves_like '400 response'
+ context 'when FF is off' do
+ before do
+ stub_feature_flags(only_positive_pagination_values: false)
+ end
+
+ where(:page, :per_page, :error_message, :status) do
+ 0 | nil | nil | :success
+ -10 | nil | nil | :internal_server_error
+ 'a' | nil | 'page is invalid' | :bad_request
+ nil | 0 | 'per_page has a value not allowed' | :bad_request
+ nil | -1 | nil | :success
+ nil | 'a' | 'per_page is invalid' | :bad_request
+ end
+
+ with_them do
+ it 'returns a response' do
+ request
+
+ expect(response).to have_gitlab_http_status(status)
+
+ if error_message
+ expect(json_response['error']).to eq(error_message)
+ end
+ end
+ end
+ end
end
end
@@ -928,6 +979,40 @@ RSpec.describe API::Commits do
end
end
+ context 'when action is missing' do
+ let(:params) do
+ {
+ branch: 'master',
+ commit_message: 'Invalid',
+ actions: [{ action: nil, file_path: 'files/ruby/popen.rb' }]
+ }
+ end
+
+ it 'responds with 400 bad request' do
+ post api(url, user), params: params
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(json_response['error']).to eq('actions[0][action] is empty')
+ end
+ end
+
+ context 'when action is not supported' do
+ let(:params) do
+ {
+ branch: 'master',
+ commit_message: 'Invalid',
+ actions: [{ action: 'unknown', file_path: 'files/ruby/popen.rb' }]
+ }
+ end
+
+ it 'responds with 400 bad request' do
+ post api(url, user), params: params
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(json_response['error']).to eq('actions[0][action] does not have a valid value')
+ end
+ end
+
context 'when committing into a fork as a maintainer' do
include_context 'merge request allowing collaboration'
@@ -988,8 +1073,8 @@ RSpec.describe API::Commits do
it 'returns all refs with no scope' do
get api(route, current_user), params: { per_page: 100 }
- refs = project.repository.branch_names_contains(commit_id).map {|name| ['branch', name]}
- refs.concat(project.repository.tag_names_contains(commit_id).map {|name| ['tag', name]})
+ refs = project.repository.branch_names_contains(commit_id).map { |name| ['branch', name] }
+ refs.concat(project.repository.tag_names_contains(commit_id).map { |name| ['tag', name] })
expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_limited_pagination_headers
@@ -1000,8 +1085,8 @@ RSpec.describe API::Commits do
it 'returns all refs' do
get api(route, current_user), params: { type: 'all', per_page: 100 }
- refs = project.repository.branch_names_contains(commit_id).map {|name| ['branch', name]}
- refs.concat(project.repository.tag_names_contains(commit_id).map {|name| ['tag', name]})
+ refs = project.repository.branch_names_contains(commit_id).map { |name| ['branch', name] }
+ refs.concat(project.repository.tag_names_contains(commit_id).map { |name| ['tag', name] })
expect(response).to have_gitlab_http_status(:ok)
expect(json_response.map { |r| [r['type'], r['name']] }.compact).to eq(refs)
@@ -1010,7 +1095,7 @@ RSpec.describe API::Commits do
it 'returns the branch refs' do
get api(route, current_user), params: { type: 'branch', per_page: 100 }
- refs = project.repository.branch_names_contains(commit_id).map {|name| ['branch', name]}
+ refs = project.repository.branch_names_contains(commit_id).map { |name| ['branch', name] }
expect(response).to have_gitlab_http_status(:ok)
expect(json_response.map { |r| [r['type'], r['name']] }.compact).to eq(refs)
@@ -1019,7 +1104,7 @@ RSpec.describe API::Commits do
it 'returns the tag refs' do
get api(route, current_user), params: { type: 'tag', per_page: 100 }
- refs = project.repository.tag_names_contains(commit_id).map {|name| ['tag', name]}
+ refs = project.repository.tag_names_contains(commit_id).map { |name| ['tag', name] }
expect(response).to have_gitlab_http_status(:ok)
expect(json_response.map { |r| [r['type'], r['name']] }.compact).to eq(refs)
@@ -2036,7 +2121,7 @@ RSpec.describe API::Commits do
context 'unsigned commit' do
it_behaves_like '404 response' do
let(:request) { get api(route, current_user) }
- let(:message) { '404 Signature Not Found'}
+ let(:message) { '404 Signature Not Found' }
end
end
diff --git a/spec/requests/api/conan_instance_packages_spec.rb b/spec/requests/api/conan_instance_packages_spec.rb
index 54cad3093d7..e4747e0eb99 100644
--- a/spec/requests/api/conan_instance_packages_spec.rb
+++ b/spec/requests/api/conan_instance_packages_spec.rb
@@ -94,7 +94,7 @@ RSpec.describe API::ConanInstancePackages do
end
describe 'DELETE /api/v4/packages/conan/v1/conans/:package_name/package_version/:package_username/:package_channel' do
- subject { delete api("/packages/conan/v1/conans/#{recipe_path}"), headers: headers}
+ subject { delete api("/packages/conan/v1/conans/#{recipe_path}"), headers: headers }
it_behaves_like 'delete package endpoint'
end
diff --git a/spec/requests/api/conan_project_packages_spec.rb b/spec/requests/api/conan_project_packages_spec.rb
index e28105eb8eb..48e36b55a68 100644
--- a/spec/requests/api/conan_project_packages_spec.rb
+++ b/spec/requests/api/conan_project_packages_spec.rb
@@ -93,7 +93,7 @@ RSpec.describe API::ConanProjectPackages do
end
describe 'DELETE /api/v4/projects/:id/packages/conan/v1/conans/:package_name/package_version/:package_username/:package_channel' do
- subject { delete api("/projects/#{project_id}/packages/conan/v1/conans/#{recipe_path}"), headers: headers}
+ subject { delete api("/projects/#{project_id}/packages/conan/v1/conans/#{recipe_path}"), headers: headers }
it_behaves_like 'delete package endpoint'
end
diff --git a/spec/requests/api/dependency_proxy_spec.rb b/spec/requests/api/dependency_proxy_spec.rb
index 067852ef1e9..a8617fcb0bf 100644
--- a/spec/requests/api/dependency_proxy_spec.rb
+++ b/spec/requests/api/dependency_proxy_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
RSpec.describe API::DependencyProxy, api: true do
let_it_be(:user) { create(:user) }
- let_it_be(:blob) { create(:dependency_proxy_blob )}
+ let_it_be(:blob) { create(:dependency_proxy_blob) }
let_it_be(:group, reload: true) { blob.group }
before do
diff --git a/spec/requests/api/deployments_spec.rb b/spec/requests/api/deployments_spec.rb
index 69f7b54c277..24c3ee59c18 100644
--- a/spec/requests/api/deployments_spec.rb
+++ b/spec/requests/api/deployments_spec.rb
@@ -448,6 +448,90 @@ RSpec.describe API::Deployments do
end
end
+ describe 'DELETE /projects/:id/deployments/:deployment_id' do
+ let(:project) { create(:project, :repository) }
+ let(:environment) { create(:environment, project: project) }
+ let(:commits) { project.repository.commits(nil, { limit: 3 }) }
+ let!(:deploy) do
+ create(
+ :deployment,
+ :success,
+ project: project,
+ environment: environment,
+ deployable: nil,
+ sha: commits[1].sha
+ )
+ end
+
+ let!(:old_deploy) do
+ create(
+ :deployment,
+ :success,
+ project: project,
+ environment: environment,
+ deployable: nil,
+ sha: commits[0].sha,
+ finished_at: 1.year.ago
+ )
+ end
+
+ let!(:running_deploy) do
+ create(
+ :deployment,
+ :running,
+ project: project,
+ environment: environment,
+ deployable: nil,
+ sha: commits[2].sha
+ )
+ end
+
+ context 'as a maintainer' do
+ it 'deletes a deployment' do
+ delete api("/projects/#{project.id}/deployments/#{old_deploy.id}", user)
+
+ expect(response).to have_gitlab_http_status(:no_content)
+ end
+
+ it 'will not delete a running deployment' do
+ delete api("/projects/#{project.id}/deployments/#{running_deploy.id}", user)
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(response.body).to include("Cannot destroy running deployment")
+ end
+ end
+
+ context 'as a developer' do
+ let(:developer) { create(:user) }
+
+ before do
+ project.add_developer(developer)
+ end
+
+ it 'is forbidden' do
+ delete api("/projects/#{project.id}/deployments/#{deploy.id}", developer)
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+ end
+
+ context 'as non member' do
+ it 'is not found' do
+ delete api("/projects/#{project.id}/deployments/#{deploy.id}", non_member)
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+
+ context 'for non-existent deployment' do
+ it 'is not found' do
+ delete api("/projects/#{project.id}/deployments/#{non_existing_record_id}", project.first_owner)
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+ end
+
describe 'GET /projects/:id/deployments/:deployment_id/merge_requests' do
let(:project) { create(:project, :repository) }
let!(:deployment) { create(:deployment, :success, project: project) }
diff --git a/spec/requests/api/doorkeeper_access_spec.rb b/spec/requests/api/doorkeeper_access_spec.rb
index 77f1dadff46..14da9a600cd 100644
--- a/spec/requests/api/doorkeeper_access_spec.rb
+++ b/spec/requests/api/doorkeeper_access_spec.rb
@@ -9,13 +9,13 @@ RSpec.describe 'doorkeeper access' do
describe "unauthenticated" do
it "returns authentication success" do
- get api("/user"), params: { access_token: token.token }
+ get api("/user"), params: { access_token: token.plaintext_token }
expect(response).to have_gitlab_http_status(:ok)
end
include_examples 'user login request with unique ip limit' do
def request
- get api('/user'), params: { access_token: token.token }
+ get api('/user'), params: { access_token: token.plaintext_token }
end
end
end
@@ -42,7 +42,7 @@ RSpec.describe 'doorkeeper access' do
shared_examples 'forbidden request' do
it 'returns 403 response' do
- get api("/user"), params: { access_token: token.token }
+ get api("/user"), params: { access_token: token.plaintext_token }
expect(response).to have_gitlab_http_status(:forbidden)
end
diff --git a/spec/requests/api/go_proxy_spec.rb b/spec/requests/api/go_proxy_spec.rb
index 0143340de11..7c44fddc303 100644
--- a/spec/requests/api/go_proxy_spec.rb
+++ b/spec/requests/api/go_proxy_spec.rb
@@ -318,7 +318,7 @@ RSpec.describe API::GoProxy do
context 'with a case sensitive project and versions' do
let_it_be(:project) { create :project_empty_repo, :public, creator: user, path: 'MyGoLib' }
let_it_be(:base) { "#{Settings.build_gitlab_go_url}/#{project.full_path}" }
- let_it_be(:base_encoded) { base.gsub(/[A-Z]/) { |s| "!#{s.downcase}"} }
+ let_it_be(:base_encoded) { base.gsub(/[A-Z]/) { |s| "!#{s.downcase}" } }
let_it_be(:modules) do
create(:go_module_commit, :files, project: project, files: { 'README.md' => "Hi" })
@@ -376,7 +376,7 @@ RSpec.describe API::GoProxy do
end
it 'returns ok with a job token' do
- get_resource(oauth_access_token: job)
+ get_resource(access_token: job)
expect(response).to have_gitlab_http_status(:ok)
end
@@ -395,7 +395,7 @@ RSpec.describe API::GoProxy do
it 'returns unauthorized with a failed job token' do
job.update!(status: :failed)
- get_resource(oauth_access_token: job)
+ get_resource(access_token: job)
expect(response).to have_gitlab_http_status(:unauthorized)
end
@@ -445,7 +445,7 @@ RSpec.describe API::GoProxy do
end
it 'returns not found with a job token' do
- get_resource(oauth_access_token: job)
+ get_resource(access_token: job)
expect(response).to have_gitlab_http_status(:not_found)
end
diff --git a/spec/requests/api/graphql/boards/board_list_issues_query_spec.rb b/spec/requests/api/graphql/boards/board_list_issues_query_spec.rb
index 6324db0be4a..484ddc3469b 100644
--- a/spec/requests/api/graphql/boards/board_list_issues_query_spec.rb
+++ b/spec/requests/api/graphql/boards/board_list_issues_query_spec.rb
@@ -15,7 +15,7 @@ RSpec.describe 'get board lists' do
let_it_be(:group_label2) { create(:group_label, group: group, name: 'Testing') }
let(:params) { '' }
- let(:board) { }
+ let(:board) {}
let(:confidential) { false }
let(:board_parent_type) { board_parent.class.to_s.downcase }
let(:board_data) { graphql_data[board_parent_type]['boards']['nodes'][0] }
diff --git a/spec/requests/api/graphql/boards/board_lists_query_spec.rb b/spec/requests/api/graphql/boards/board_lists_query_spec.rb
index 39ff108a9e1..6fe2e41cf35 100644
--- a/spec/requests/api/graphql/boards/board_lists_query_spec.rb
+++ b/spec/requests/api/graphql/boards/board_lists_query_spec.rb
@@ -15,7 +15,7 @@ RSpec.describe 'get board lists' do
let_it_be(:group_label2) { create(:group_label, group: group, name: 'Testing') }
let(:params) { '' }
- let(:board) { }
+ let(:board) {}
let(:board_parent_type) { board_parent.class.to_s.downcase }
let(:board_data) { graphql_data[board_parent_type]['boards']['edges'].first['node'] }
let(:lists_data) { board_data['lists']['edges'] }
diff --git a/spec/requests/api/graphql/ci/instance_variables_spec.rb b/spec/requests/api/graphql/ci/instance_variables_spec.rb
index 7acf73a4e7a..c5c88697bf4 100644
--- a/spec/requests/api/graphql/ci/instance_variables_spec.rb
+++ b/spec/requests/api/graphql/ci/instance_variables_spec.rb
@@ -57,4 +57,16 @@ RSpec.describe 'Query.ciVariables' do
expect(graphql_data.dig('ciVariables')).to be_nil
end
end
+
+ context 'when the user is unauthenticated' do
+ let_it_be(:user) { nil }
+
+ it 'returns nothing' do
+ create(:ci_instance_variable, value: 'verysecret', masked: true)
+
+ post_graphql(query, current_user: user)
+
+ expect(graphql_data.dig('ciVariables')).to be_nil
+ end
+ end
end
diff --git a/spec/requests/api/graphql/ci/manual_variables_spec.rb b/spec/requests/api/graphql/ci/manual_variables_spec.rb
index b7aa76511a3..a15bac2b8bd 100644
--- a/spec/requests/api/graphql/ci/manual_variables_spec.rb
+++ b/spec/requests/api/graphql/ci/manual_variables_spec.rb
@@ -35,8 +35,8 @@ RSpec.describe 'Query.project(fullPath).pipelines.jobs.manualVariables' do
project.add_maintainer(user)
end
- it 'returns the manual variables for the jobs' do
- job = create(:ci_build, :manual, pipeline: pipeline)
+ it 'returns the manual variables for actionable jobs' do
+ job = create(:ci_build, :actionable, pipeline: pipeline)
create(:ci_job_variable, key: 'MANUAL_TEST_VAR', job: job)
post_graphql(query, current_user: user)
@@ -46,8 +46,8 @@ RSpec.describe 'Query.project(fullPath).pipelines.jobs.manualVariables' do
expect(variables_data.map { |var| var['key'] }).to match_array(['MANUAL_TEST_VAR'])
end
- it 'does not fetch job variables for jobs that are not manual' do
- job = create(:ci_build, pipeline: pipeline)
+ it 'does not fetch job variables for jobs that are not actionable' do
+ job = create(:ci_build, pipeline: pipeline, status: :manual)
create(:ci_job_variable, key: 'THIS_VAR_WOULD_SHOULD_NEVER_EXIST', job: job)
post_graphql(query, current_user: user)
diff --git a/spec/requests/api/graphql/ci/pipelines_spec.rb b/spec/requests/api/graphql/ci/pipelines_spec.rb
index a968e5508cb..f471a152603 100644
--- a/spec/requests/api/graphql/ci/pipelines_spec.rb
+++ b/spec/requests/api/graphql/ci/pipelines_spec.rb
@@ -166,6 +166,35 @@ RSpec.describe 'Query.project(fullPath).pipelines' do
end
end
+ describe '.job' do
+ let(:first_n) { var('Int') }
+ let(:query_path) do
+ [
+ [:project, { full_path: project.full_path }],
+ [:pipelines],
+ [:nodes],
+ [:job, { name: 'Job 1' }]
+ ]
+ end
+
+ let(:query) do
+ wrap_fields(query_graphql_path(query_path, :status))
+ end
+
+ before_all do
+ pipeline = create(:ci_pipeline, project: project)
+ create(:ci_build, pipeline: pipeline, name: 'Job 1', status: :failed, retried: true)
+ create(:ci_build, pipeline: pipeline, name: 'Job 1', status: :success)
+ end
+
+ it 'fetches the latest job with the given name' do
+ post_graphql(query, current_user: user)
+ expect(graphql_data_at(*query_path.map(&:first))).to contain_exactly a_hash_including(
+ 'status' => 'SUCCESS'
+ )
+ end
+ end
+
describe '.jobs' do
let(:first_n) { var('Int') }
let(:query_path) do
diff --git a/spec/requests/api/graphql/ci/runners_spec.rb b/spec/requests/api/graphql/ci/runners_spec.rb
index a5b8115286e..749f6839cb5 100644
--- a/spec/requests/api/graphql/ci/runners_spec.rb
+++ b/spec/requests/api/graphql/ci/runners_spec.rb
@@ -37,7 +37,9 @@ RSpec.describe 'Query.runners' do
end
before do
- allow(Gitlab::Ci::RunnerUpgradeCheck.instance).to receive(:check_runner_upgrade_status)
+ allow_next_instance_of(::Gitlab::Ci::RunnerUpgradeCheck) do |instance|
+ allow(instance).to receive(:check_runner_upgrade_suggestion)
+ end
post_graphql(query, current_user: current_user)
end
diff --git a/spec/requests/api/graphql/crm/contacts_spec.rb b/spec/requests/api/graphql/crm/contacts_spec.rb
index 7e824140894..a676e92dc3b 100644
--- a/spec/requests/api/graphql/crm/contacts_spec.rb
+++ b/spec/requests/api/graphql/crm/contacts_spec.rb
@@ -12,11 +12,11 @@ RSpec.describe 'getting CRM contacts' do
create(
:contact,
group: group,
- first_name: "ABC",
- last_name: "DEF",
- email: "ghi@test.com",
- description: "LMNO",
- state: "inactive"
+ first_name: "PQR",
+ last_name: "STU",
+ email: "aaa@test.com",
+ description: "YZ",
+ state: "active"
)
end
@@ -26,9 +26,9 @@ RSpec.describe 'getting CRM contacts' do
group: group,
first_name: "ABC",
last_name: "DEF",
- email: "vwx@test.com",
- description: "YZ",
- state: "active"
+ email: "ghi@test.com",
+ description: "LMNO",
+ state: "inactive"
)
end
@@ -36,9 +36,9 @@ RSpec.describe 'getting CRM contacts' do
create(
:contact,
group: group,
- first_name: "PQR",
- last_name: "STU",
- email: "aaa@test.com",
+ first_name: "JKL",
+ last_name: "MNO",
+ email: "vwx@test.com",
description: "YZ",
state: "active"
)
@@ -51,7 +51,7 @@ RSpec.describe 'getting CRM contacts' do
it_behaves_like 'sorted paginated query' do
let(:sort_argument) { {} }
let(:first_param) { 2 }
- let(:all_records) { [contact_a, contact_b, contact_c] }
+ let(:all_records) { [contact_b, contact_c, contact_a] }
let(:data_path) { [:group, :contacts] }
def pagination_query(params)
diff --git a/spec/requests/api/graphql/current_user/groups_query_spec.rb b/spec/requests/api/graphql/current_user/groups_query_spec.rb
index ef0f32bacf0..6e36beb2afc 100644
--- a/spec/requests/api/graphql/current_user/groups_query_spec.rb
+++ b/spec/requests/api/graphql/current_user/groups_query_spec.rb
@@ -6,10 +6,11 @@ RSpec.describe 'Query current user groups' do
include GraphqlHelpers
let_it_be(:user) { create(:user) }
+ let_it_be(:root_group) { create(:group, name: 'Root group', path: 'root-group') }
let_it_be(:guest_group) { create(:group, name: 'public guest', path: 'public-guest') }
- let_it_be(:private_maintainer_group) { create(:group, :private, name: 'b private maintainer', path: 'b-private-maintainer') }
+ let_it_be(:private_maintainer_group) { create(:group, :private, name: 'b private maintainer', path: 'b-private-maintainer', parent: root_group) }
let_it_be(:public_developer_group) { create(:group, project_creation_level: nil, name: 'c public developer', path: 'c-public-developer') }
- let_it_be(:public_maintainer_group) { create(:group, name: 'a public maintainer', path: 'a-public-maintainer') }
+ let_it_be(:public_maintainer_group) { create(:group, name: 'a public maintainer', path: 'a-public-maintainer', parent: root_group) }
let_it_be(:public_owner_group) { create(:group, name: 'a public owner', path: 'a-public-owner') }
let(:group_arguments) { {} }
@@ -77,7 +78,7 @@ RSpec.describe 'Query current user groups' do
end
context 'when search is provided' do
- let(:group_arguments) { { permission_scope: :CREATE_PROJECTS, search: 'maintainer' } }
+ let(:group_arguments) { { permission_scope: :CREATE_PROJECTS, search: 'root-group maintainer' } }
specify do
is_expected.to match(
@@ -127,6 +128,18 @@ RSpec.describe 'Query current user groups' do
)
)
end
+
+ context 'when searching for a full path (including parent)' do
+ let(:group_arguments) { { search: 'root-group/b-private-maintainer' } }
+
+ specify do
+ is_expected.to match(
+ expected_group_hash(
+ private_maintainer_group
+ )
+ )
+ end
+ end
end
def expected_group_hash(*groups)
diff --git a/spec/requests/api/graphql/custom_emoji_query_spec.rb b/spec/requests/api/graphql/custom_emoji_query_spec.rb
index 874357d9eef..13b7a22e791 100644
--- a/spec/requests/api/graphql/custom_emoji_query_spec.rb
+++ b/spec/requests/api/graphql/custom_emoji_query_spec.rb
@@ -31,8 +31,8 @@ RSpec.describe 'getting custom emoji within namespace' do
post_graphql(custom_emoji_query(group), current_user: current_user)
expect(response).to have_gitlab_http_status(:ok)
- expect(graphql_data['group']['customEmoji']['nodes'].count). to eq(1)
- expect(graphql_data['group']['customEmoji']['nodes'].first['name']). to eq(custom_emoji.name)
+ expect(graphql_data['group']['customEmoji']['nodes'].count).to eq(1)
+ expect(graphql_data['group']['customEmoji']['nodes'].first['name']).to eq(custom_emoji.name)
end
it 'returns nil when unauthorised' do
diff --git a/spec/requests/api/graphql/group/group_members_spec.rb b/spec/requests/api/graphql/group/group_members_spec.rb
index 1ff5b134e92..bab8d5b770c 100644
--- a/spec/requests/api/graphql/group/group_members_spec.rb
+++ b/spec/requests/api/graphql/group/group_members_spec.rb
@@ -64,24 +64,6 @@ RSpec.describe 'getting group members information' do
expect_array_response(user_2)
end
-
- context 'when the use_keyset_aware_user_search_query FF is off' do
- before do
- stub_feature_flags(use_keyset_aware_user_search_query: false)
- end
-
- it 'raises error on the 2nd page due to missing cursor data' do
- fetch_members(args: { search: 'Same Name', first: 1 })
-
- # user_2 because the "old" order was undeterministic (insert order), no tie-breaker column
- expect_array_response(user_2)
-
- next_cursor = graphql_data_at(:group, :groupMembers, :pageInfo, :endCursor)
- fetch_members(args: { search: 'Same Name', first: 1, after: next_cursor })
-
- expect(graphql_errors.first['message']).to include('PG::UndefinedColumn')
- end
- end
end
end
end
diff --git a/spec/requests/api/graphql/group_query_spec.rb b/spec/requests/api/graphql/group_query_spec.rb
index fd0ee5d52b9..8ee5c3c5d73 100644
--- a/spec/requests/api/graphql/group_query_spec.rb
+++ b/spec/requests/api/graphql/group_query_spec.rb
@@ -122,6 +122,87 @@ RSpec.describe 'getting group information' do
end
end
+ context 'with timelog categories' do
+ let_it_be(:group) { create(:group) }
+ let_it_be(:timelog_category) { create(:timelog_category, namespace: group, name: 'TimelogCategoryTest') }
+
+ context 'when user is guest' do
+ it 'includes empty timelog categories array' do
+ post_graphql(group_query(group), current_user: user2)
+
+ expect(graphql_data_at(:group, :timelogCategories, :nodes)).to match([])
+ end
+ end
+
+ context 'when user has reporter role' do
+ before do
+ group.add_reporter(user2)
+ end
+
+ it 'returns the timelog category with all its fields' do
+ post_graphql(group_query(group), current_user: user2)
+
+ expect(graphql_data_at(:group, :timelogCategories, :nodes))
+ .to contain_exactly(a_graphql_entity_for(timelog_category))
+ end
+
+ context 'when timelog_categories flag is disabled' do
+ before do
+ stub_feature_flags(timelog_categories: false)
+ end
+
+ it 'returns no timelog categories' do
+ post_graphql(group_query(group), current_user: user2)
+
+ expect(graphql_data_at(:group, :timelogCategories)).to be_nil
+ end
+ end
+ end
+
+ context 'for N+1 queries' do
+ let!(:group1) { create(:group) }
+ let!(:group2) { create(:group) }
+
+ before do
+ group1.add_reporter(user2)
+ group2.add_reporter(user2)
+ end
+
+ it 'avoids N+1 database queries' do
+ pending('See: https://gitlab.com/gitlab-org/gitlab/-/issues/369396')
+
+ ctx = { current_user: user2 }
+
+ baseline_query = <<~GQL
+ query {
+ a: group(fullPath: "#{group1.full_path}") { ... g }
+ }
+
+ fragment g on Group {
+ timelogCategories { nodes { name } }
+ }
+ GQL
+
+ query = <<~GQL
+ query {
+ a: group(fullPath: "#{group1.full_path}") { ... g }
+ b: group(fullPath: "#{group2.full_path}") { ... g }
+ }
+
+ fragment g on Group {
+ timelogCategories { nodes { name } }
+ }
+ GQL
+
+ control = ActiveRecord::QueryRecorder.new do
+ run_with_clean_state(baseline_query, context: ctx)
+ end
+
+ expect { run_with_clean_state(query, context: ctx) }.not_to exceed_query_limit(control)
+ end
+ end
+ end
+
context "when authenticated as admin" do
it "returns any existing group" do
post_graphql(group_query(private_group), current_user: admin)
diff --git a/spec/requests/api/graphql/mutations/award_emojis/add_spec.rb b/spec/requests/api/graphql/mutations/award_emojis/add_spec.rb
index fdf5503a3a2..3879e58cecf 100644
--- a/spec/requests/api/graphql/mutations/award_emojis/add_spec.rb
+++ b/spec/requests/api/graphql/mutations/award_emojis/add_spec.rb
@@ -74,7 +74,7 @@ RSpec.describe 'Adding an AwardEmoji' do
end
describe 'marking Todos as done' do
- let(:user) { current_user}
+ let(:user) { current_user }
subject { post_graphql_mutation(mutation, current_user: user) }
diff --git a/spec/requests/api/graphql/mutations/award_emojis/toggle_spec.rb b/spec/requests/api/graphql/mutations/award_emojis/toggle_spec.rb
index 6b26e37e30c..7ddffa1ab0a 100644
--- a/spec/requests/api/graphql/mutations/award_emojis/toggle_spec.rb
+++ b/spec/requests/api/graphql/mutations/award_emojis/toggle_spec.rb
@@ -84,7 +84,7 @@ RSpec.describe 'Toggling an AwardEmoji' do
end
describe 'marking Todos as done' do
- let(:user) { current_user}
+ let(:user) { current_user }
subject { post_graphql_mutation(mutation, current_user: user) }
diff --git a/spec/requests/api/graphql/mutations/boards/destroy_spec.rb b/spec/requests/api/graphql/mutations/boards/destroy_spec.rb
index 23e099e94b6..7620da3e7e0 100644
--- a/spec/requests/api/graphql/mutations/boards/destroy_spec.rb
+++ b/spec/requests/api/graphql/mutations/boards/destroy_spec.rb
@@ -65,15 +65,8 @@ RSpec.describe Mutations::Boards::Destroy do
other_board.destroy!
end
- it 'does not destroy the board' do
- expect { subject }.not_to change { Board.count }.from(1)
- end
-
- it 'returns an error and not nil board' do
- subject
-
- expect(mutation_response['errors']).not_to be_empty
- expect(mutation_response['board']).not_to be_nil
+ it 'does destroy the board' do
+ expect { subject }.to change { Board.count }.by(-1)
end
end
end
diff --git a/spec/requests/api/graphql/mutations/ci/job_retry_spec.rb b/spec/requests/api/graphql/mutations/ci/job_retry_spec.rb
index ef640183bd8..8cf559a372a 100644
--- a/spec/requests/api/graphql/mutations/ci/job_retry_spec.rb
+++ b/spec/requests/api/graphql/mutations/ci/job_retry_spec.rb
@@ -47,6 +47,38 @@ RSpec.describe 'JobRetry' do
expect(new_job).not_to be_retried
end
+ context 'when given CI variables' do
+ let(:job) { create(:ci_build, :success, :actionable, pipeline: pipeline, name: 'build') }
+
+ let(:mutation) do
+ variables = {
+ id: job.to_global_id.to_s,
+ variables: { key: 'MANUAL_VAR', value: 'test manual var' }
+ }
+
+ graphql_mutation(:job_retry, variables,
+ <<-QL
+ errors
+ job {
+ id
+ }
+ QL
+ )
+ end
+
+ it 'applies them to a retried manual job' do
+ post_graphql_mutation(mutation, current_user: user)
+
+ expect(response).to have_gitlab_http_status(:success)
+
+ new_job_id = GitlabSchema.object_from_id(mutation_response['job']['id']).sync.id
+ new_job = ::Ci::Build.find(new_job_id)
+ expect(new_job.job_variables.count).to be(1)
+ expect(new_job.job_variables.first.key).to eq('MANUAL_VAR')
+ expect(new_job.job_variables.first.value).to eq('test manual var')
+ end
+ end
+
context 'when the job is not retryable' do
let(:job) { create(:ci_build, :retried, pipeline: pipeline) }
diff --git a/spec/requests/api/graphql/mutations/ci/pipeline_cancel_spec.rb b/spec/requests/api/graphql/mutations/ci/pipeline_cancel_spec.rb
index d9106aa42c4..6ec1b7ce9b6 100644
--- a/spec/requests/api/graphql/mutations/ci/pipeline_cancel_spec.rb
+++ b/spec/requests/api/graphql/mutations/ci/pipeline_cancel_spec.rb
@@ -40,7 +40,7 @@ RSpec.describe 'PipelineCancel' do
expect(build).not_to be_canceled
end
- it "cancels all cancelable builds from a pipeline" do
+ it 'cancels all cancelable builds from a pipeline', :sidekiq_inline do
build = create(:ci_build, :running, pipeline: pipeline)
post_graphql_mutation(mutation, current_user: user)
diff --git a/spec/requests/api/graphql/mutations/merge_requests/request_attention_spec.rb b/spec/requests/api/graphql/mutations/merge_requests/request_attention_spec.rb
deleted file mode 100644
index 9c751913827..00000000000
--- a/spec/requests/api/graphql/mutations/merge_requests/request_attention_spec.rb
+++ /dev/null
@@ -1,79 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe 'Request attention' do
- include GraphqlHelpers
-
- let_it_be(:current_user) { create(:user) }
- let_it_be(:user) { create(:user) }
- let_it_be(:merge_request) { create(:merge_request, reviewers: [user]) }
- let_it_be(:project) { merge_request.project }
-
- let(:input) { { user_id: global_id_of(user) } }
-
- let(:mutation) do
- variables = {
- project_path: project.full_path,
- iid: merge_request.iid.to_s
- }
- graphql_mutation(:merge_request_request_attention, variables.merge(input),
- <<-QL.strip_heredoc
- clientMutationId
- errors
- QL
- )
- end
-
- def mutation_response
- graphql_mutation_response(:merge_request_request_attention)
- end
-
- def mutation_errors
- mutation_response['errors']
- end
-
- before_all do
- project.add_developer(current_user)
- project.add_developer(user)
- end
-
- it 'is successful' do
- post_graphql_mutation(mutation, current_user: current_user)
-
- expect(response).to have_gitlab_http_status(:success)
- expect(mutation_errors).to be_empty
- end
-
- context 'when current user is not allowed to update the merge request' do
- it 'returns an error' do
- post_graphql_mutation(mutation, current_user: create(:user))
-
- expect(graphql_errors).not_to be_empty
- end
- end
-
- context 'when user is not a reviewer' do
- let(:input) { { user_id: global_id_of(create(:user)) } }
-
- it 'returns an error' do
- post_graphql_mutation(mutation, current_user: current_user)
-
- expect(response).to have_gitlab_http_status(:success)
- expect(mutation_errors).not_to be_empty
- end
- end
-
- context 'feature flag is disabled' do
- before do
- stub_feature_flags(mr_attention_requests: false)
- end
-
- it 'returns an error' do
- post_graphql_mutation(mutation, current_user: current_user)
-
- expect(response).to have_gitlab_http_status(:success)
- expect(graphql_errors[0]["message"]).to eq "Feature disabled"
- end
- end
-end
diff --git a/spec/requests/api/graphql/mutations/merge_requests/set_reviewers_spec.rb b/spec/requests/api/graphql/mutations/merge_requests/set_reviewers_spec.rb
new file mode 100644
index 00000000000..be786256ef2
--- /dev/null
+++ b/spec/requests/api/graphql/mutations/merge_requests/set_reviewers_spec.rb
@@ -0,0 +1,106 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Setting reviewers of a merge request', :assume_throttled do
+ include GraphqlHelpers
+
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:current_user) { create(:user, developer_projects: [project]) }
+ let_it_be(:reviewer) { create(:user) }
+ let_it_be(:reviewer2) { create(:user) }
+ let_it_be_with_reload(:merge_request) { create(:merge_request, source_project: project) }
+
+ let(:input) { { reviewer_usernames: [reviewer.username] } }
+ let(:expected_result) do
+ [{ 'username' => reviewer.username }]
+ end
+
+ let(:mutation) do
+ variables = {
+ project_path: project.full_path,
+ iid: merge_request.iid.to_s
+ }
+ graphql_mutation(:merge_request_set_reviewers, variables.merge(input),
+ <<-QL.strip_heredoc
+ clientMutationId
+ errors
+ mergeRequest {
+ id
+ reviewers {
+ nodes {
+ username
+ }
+ }
+ }
+ QL
+ )
+ end
+
+ def mutation_response
+ graphql_mutation_response(:merge_request_set_reviewers)
+ end
+
+ def mutation_reviewer_nodes
+ mutation_response['mergeRequest']['reviewers']['nodes']
+ end
+
+ def run_mutation!
+ post_graphql_mutation(mutation, current_user: current_user)
+ end
+
+ before do
+ project.add_developer(reviewer)
+ project.add_developer(reviewer2)
+
+ merge_request.update!(reviewers: [])
+ end
+
+ it 'returns an error if the user is not allowed to update the merge request' do
+ post_graphql_mutation(mutation, current_user: create(:user))
+
+ expect(graphql_errors).not_to be_empty
+ end
+
+ context 'when the current user does not have permission to add reviewers' do
+ let(:current_user) { create(:user) }
+
+ it 'does not change the reviewers' do
+ project.add_guest(current_user)
+
+ expect { run_mutation! }.not_to change { merge_request.reset.reviewers.pluck(:id) }
+
+ expect(graphql_errors).not_to be_empty
+ end
+ end
+
+ context 'with reviewers already assigned' do
+ before do
+ merge_request.reviewers = [reviewer2]
+ merge_request.save!
+ end
+
+ it 'replaces the reviewer' do
+ run_mutation!
+
+ expect(response).to have_gitlab_http_status(:success)
+ expect(mutation_reviewer_nodes).to match_array(expected_result)
+ end
+ end
+
+ context 'when passing an empty list of reviewers' do
+ let(:input) { { reviewer_usernames: [] } }
+
+ before do
+ merge_request.reviewers = [reviewer2]
+ merge_request.save!
+ end
+
+ it 'removes reviewer' do
+ run_mutation!
+
+ expect(response).to have_gitlab_http_status(:success)
+ expect(mutation_reviewer_nodes).to eq([])
+ end
+ end
+end
diff --git a/spec/requests/api/graphql/mutations/merge_requests/update_reviewer_state_spec.rb b/spec/requests/api/graphql/mutations/merge_requests/update_reviewer_state_spec.rb
deleted file mode 100644
index cf497cb2579..00000000000
--- a/spec/requests/api/graphql/mutations/merge_requests/update_reviewer_state_spec.rb
+++ /dev/null
@@ -1,65 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe 'Toggle attention requested for reviewer' do
- include GraphqlHelpers
-
- let(:current_user) { create(:user) }
- let(:merge_request) { create(:merge_request, reviewers: [user]) }
- let(:project) { merge_request.project }
- let(:user) { create(:user) }
- let(:input) { { user_id: global_id_of(user) } }
-
- let(:mutation) do
- variables = {
- project_path: project.full_path,
- iid: merge_request.iid.to_s
- }
- graphql_mutation(:merge_request_toggle_attention_requested, variables.merge(input),
- <<-QL.strip_heredoc
- clientMutationId
- errors
- QL
- )
- end
-
- def mutation_response
- graphql_mutation_response(:merge_request_toggle_attention_requested)
- end
-
- def mutation_errors
- mutation_response['errors']
- end
-
- before do
- project.add_developer(current_user)
- project.add_developer(user)
- end
-
- it 'returns an error if the user is not allowed to update the merge request' do
- post_graphql_mutation(mutation, current_user: create(:user))
-
- expect(graphql_errors).not_to be_empty
- end
-
- describe 'reviewer does not exist' do
- let(:input) { { user_id: global_id_of(create(:user)) } }
-
- it 'returns an error' do
- post_graphql_mutation(mutation, current_user: current_user)
-
- expect(response).to have_gitlab_http_status(:success)
- expect(mutation_errors).not_to be_empty
- end
- end
-
- describe 'reviewer exists' do
- it 'does not return an error' do
- post_graphql_mutation(mutation, current_user: current_user)
-
- expect(response).to have_gitlab_http_status(:success)
- expect(mutation_errors).to be_empty
- end
- end
-end
diff --git a/spec/requests/api/graphql/mutations/notes/create/note_spec.rb b/spec/requests/api/graphql/mutations/notes/create/note_spec.rb
index 22b5f2d5112..9c3842db31a 100644
--- a/spec/requests/api/graphql/mutations/notes/create/note_spec.rb
+++ b/spec/requests/api/graphql/mutations/notes/create/note_spec.rb
@@ -79,21 +79,29 @@ RSpec.describe 'Adding a Note' do
context 'for an issue' do
let(:noteable) { create(:issue, project: project) }
- let(:mutation) do
- variables = {
+ let(:mutation) { graphql_mutation(:create_note, variables) }
+ let(:variables) do
+ {
noteable_id: GitlabSchema.id_from_object(noteable).to_s,
- body: body,
- confidential: true
- }
-
- graphql_mutation(:create_note, variables)
+ body: body
+ }.merge(variables_extra)
end
before do
project.add_developer(current_user)
end
- it_behaves_like 'a Note mutation with confidential notes'
+ context 'when using internal param' do
+ let(:variables_extra) { { internal: true } }
+
+ it_behaves_like 'a Note mutation with confidential notes'
+ end
+
+ context 'when using deprecated confidential param' do
+ let(:variables_extra) { { confidential: true } }
+
+ it_behaves_like 'a Note mutation with confidential notes'
+ end
end
context 'when body only contains quick actions' do
diff --git a/spec/requests/api/graphql/mutations/releases/create_spec.rb b/spec/requests/api/graphql/mutations/releases/create_spec.rb
index 1e62942c29d..2541072b766 100644
--- a/spec/requests/api/graphql/mutations/releases/create_spec.rb
+++ b/spec/requests/api/graphql/mutations/releases/create_spec.rb
@@ -16,10 +16,10 @@ RSpec.describe 'Creation of a new release' do
let(:mutation_name) { :release_create }
- let(:tag_name) { 'v7.12.5'}
+ let(:tag_name) { 'v7.12.5' }
let(:tag_message) { nil }
- let(:ref) { 'master'}
- let(:name) { 'Version 7.12.5'}
+ let(:ref) { 'master' }
+ let(:name) { 'Version 7.12.5' }
let(:description) { 'Release 7.12.5 :rocket:' }
let(:released_at) { '2018-12-10' }
let(:milestones) { [milestone_12_3.title, milestone_12_4.title] }
diff --git a/spec/requests/api/graphql/mutations/releases/update_spec.rb b/spec/requests/api/graphql/mutations/releases/update_spec.rb
index 0fa3d7de299..33d4e57904c 100644
--- a/spec/requests/api/graphql/mutations/releases/update_spec.rb
+++ b/spec/requests/api/graphql/mutations/releases/update_spec.rb
@@ -15,7 +15,7 @@ RSpec.describe 'Updating an existing release' do
let_it_be(:milestone_12_4) { create(:milestone, project: project, title: '12.4') }
let_it_be(:tag_name) { 'v1.1.0' }
- let_it_be(:name) { 'Version 7.12.5'}
+ let_it_be(:name) { 'Version 7.12.5' }
let_it_be(:description) { 'Release 7.12.5 :rocket:' }
let_it_be(:released_at) { '2018-12-10' }
let_it_be(:created_at) { '2018-11-05' }
diff --git a/spec/requests/api/graphql/mutations/remove_attention_request_spec.rb b/spec/requests/api/graphql/mutations/remove_attention_request_spec.rb
deleted file mode 100644
index 053559b039d..00000000000
--- a/spec/requests/api/graphql/mutations/remove_attention_request_spec.rb
+++ /dev/null
@@ -1,79 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe 'Remove attention request' do
- include GraphqlHelpers
-
- let_it_be(:current_user) { create(:user) }
- let_it_be(:user) { create(:user) }
- let_it_be(:merge_request) { create(:merge_request, reviewers: [user]) }
- let_it_be(:project) { merge_request.project }
-
- let(:input) { { user_id: global_id_of(user) } }
-
- let(:mutation) do
- variables = {
- project_path: project.full_path,
- iid: merge_request.iid.to_s
- }
- graphql_mutation(:merge_request_remove_attention_request, variables.merge(input),
- <<-QL.strip_heredoc
- clientMutationId
- errors
- QL
- )
- end
-
- def mutation_response
- graphql_mutation_response(:merge_request_remove_attention_request)
- end
-
- def mutation_errors
- mutation_response['errors']
- end
-
- before_all do
- project.add_developer(current_user)
- project.add_developer(user)
- end
-
- it 'is successful' do
- post_graphql_mutation(mutation, current_user: current_user)
-
- expect(response).to have_gitlab_http_status(:success)
- expect(mutation_errors).to be_empty
- end
-
- context 'when current user is not allowed to update the merge request' do
- it 'returns an error' do
- post_graphql_mutation(mutation, current_user: create(:user))
-
- expect(graphql_errors).not_to be_empty
- end
- end
-
- context 'when user is not a reviewer' do
- let(:input) { { user_id: global_id_of(create(:user)) } }
-
- it 'returns an error' do
- post_graphql_mutation(mutation, current_user: current_user)
-
- expect(response).to have_gitlab_http_status(:success)
- expect(mutation_errors).not_to be_empty
- end
- end
-
- context 'feature flag is disabled' do
- before do
- stub_feature_flags(mr_attention_requests: false)
- end
-
- it 'returns an error' do
- post_graphql_mutation(mutation, current_user: current_user)
-
- expect(response).to have_gitlab_http_status(:success)
- expect(graphql_errors[0]["message"]).to eq "Feature disabled"
- end
- end
-end
diff --git a/spec/requests/api/graphql/mutations/snippets/create_spec.rb b/spec/requests/api/graphql/mutations/snippets/create_spec.rb
index 9a3cea3ca14..264fa5732c3 100644
--- a/spec/requests/api/graphql/mutations/snippets/create_spec.rb
+++ b/spec/requests/api/graphql/mutations/snippets/create_spec.rb
@@ -12,8 +12,8 @@ RSpec.describe 'Creating a Snippet' do
let(:title) { 'Initial title' }
let(:visibility_level) { 'public' }
let(:action) { :create }
- let(:file_1) { { filePath: 'example_file1', content: 'This is the example file 1' }}
- let(:file_2) { { filePath: 'example_file2', content: 'This is the example file 2' }}
+ let(:file_1) { { filePath: 'example_file1', content: 'This is the example file 1' } }
+ let(:file_2) { { filePath: 'example_file2', content: 'This is the example file 2' } }
let(:actions) { [{ action: action }.merge(file_1), { action: action }.merge(file_2)] }
let(:project_path) { nil }
let(:uploaded_files) { nil }
@@ -149,7 +149,7 @@ RSpec.describe 'Creating a Snippet' do
end
context 'when there non ActiveRecord errors' do
- let(:file_1) { { filePath: 'invalid://file/path', content: 'foobar' }}
+ let(:file_1) { { filePath: 'invalid://file/path', content: 'foobar' } }
it_behaves_like 'a mutation that returns errors in the response', errors: ['Repository Error creating the snippet - Invalid file name']
it_behaves_like 'does not create snippet'
diff --git a/spec/requests/api/graphql/mutations/timelogs/create_spec.rb b/spec/requests/api/graphql/mutations/timelogs/create_spec.rb
new file mode 100644
index 00000000000..eea04b89783
--- /dev/null
+++ b/spec/requests/api/graphql/mutations/timelogs/create_spec.rb
@@ -0,0 +1,48 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Create a timelog' do
+ include GraphqlHelpers
+
+ let_it_be(:author) { create(:user) }
+ let_it_be(:project) { create(:project, :public) }
+ let_it_be(:time_spent) { '1h' }
+
+ let(:current_user) { nil }
+ let(:users_container) { project }
+ let(:mutation) do
+ graphql_mutation(:timelogCreate, {
+ 'time_spent' => time_spent,
+ 'spent_at' => '2022-07-08',
+ 'summary' => 'Test summary',
+ 'issuable_id' => issuable.to_global_id.to_s
+ })
+ end
+
+ let(:mutation_response) { graphql_mutation_response(:timelog_create) }
+
+ context 'when issuable is an Issue' do
+ let_it_be(:issuable) { create(:issue, project: project) }
+
+ it_behaves_like 'issuable supports timelog creation mutation'
+ end
+
+ context 'when issuable is a MergeRequest' do
+ let_it_be(:issuable) { create(:merge_request, source_project: project) }
+
+ it_behaves_like 'issuable supports timelog creation mutation'
+ end
+
+ context 'when issuable is a WorkItem' do
+ let_it_be(:issuable) { create(:work_item, project: project, title: 'WorkItem') }
+
+ it_behaves_like 'issuable supports timelog creation mutation'
+ end
+
+ context 'when issuable is an Incident' do
+ let_it_be(:issuable) { create(:incident, project: project) }
+
+ it_behaves_like 'issuable supports timelog creation mutation'
+ end
+end
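
The four contexts above delegate all assertions to a shared example that is defined outside this diff. A minimal sketch of what 'issuable supports timelog creation mutation' might cover, assuming the GraphqlHelpers conventions already used in this file (post_graphql_mutation, mutation_response) and a hypothetical permission setup through users_container; the real shared example under spec/support may differ:

    # Sketch only -- the actual shared example lives under spec/support and may differ.
    RSpec.shared_examples 'issuable supports timelog creation mutation' do
      context 'when the user has no permission to create timelogs' do
        let(:current_user) { create(:user) }

        it_behaves_like 'a mutation that returns a top-level access error'
      end

      context 'when the user is a reporter' do
        let(:current_user) { author }

        before do
          users_container.add_reporter(current_user)
        end

        it 'creates a timelog for the issuable' do
          expect { post_graphql_mutation(mutation, current_user: current_user) }
            .to change { issuable.timelogs.count }.by(1)

          expect(response).to have_gitlab_http_status(:success)
          expect(mutation_response['errors']).to be_empty
        end
      end
    end
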
diff --git a/spec/requests/api/graphql/mutations/timelogs/delete_spec.rb b/spec/requests/api/graphql/mutations/timelogs/delete_spec.rb
index b674e77f093..d304bfbdf00 100644
--- a/spec/requests/api/graphql/mutations/timelogs/delete_spec.rb
+++ b/spec/requests/api/graphql/mutations/timelogs/delete_spec.rb
@@ -7,7 +7,7 @@ RSpec.describe 'Delete a timelog' do
let_it_be(:author) { create(:user) }
let_it_be(:project) { create(:project, :public) }
let_it_be(:issue) { create(:issue, project: project) }
- let_it_be(:timelog) { create(:timelog, user: author, issue: issue, time_spent: 1800)}
+ let_it_be(:timelog) { create(:timelog, user: author, issue: issue, time_spent: 1800) }
let(:current_user) { nil }
let(:mutation) { graphql_mutation(:timelogDelete, { 'id' => timelog.to_global_id.to_s }) }
diff --git a/spec/requests/api/graphql/mutations/uploads/delete_spec.rb b/spec/requests/api/graphql/mutations/uploads/delete_spec.rb
new file mode 100644
index 00000000000..f44bf179397
--- /dev/null
+++ b/spec/requests/api/graphql/mutations/uploads/delete_spec.rb
@@ -0,0 +1,74 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Delete an upload' do
+ include GraphqlHelpers
+
+ let_it_be(:group) { create(:group) }
+ let_it_be(:project) { create(:project, group: group) }
+ let_it_be(:developer) { create(:user).tap { |user| group.add_developer(user) } }
+ let_it_be(:maintainer) { create(:user).tap { |user| group.add_maintainer(user) } }
+
+ let(:extra_params) { {} }
+ let(:params) { { filename: File.basename(upload.path), secret: upload.secret }.merge(extra_params) }
+ let(:mutation) { graphql_mutation(:uploadDelete, params) }
+ let(:mutation_response) { graphql_mutation_response(:upload_delete) }
+
+ shared_examples_for 'upload deletion' do
+ context 'when the user is not allowed to delete uploads' do
+ let(:current_user) { developer }
+
+ it_behaves_like 'a mutation that returns a top-level access error'
+ end
+
+ context 'when the user is anonymous' do
+ let(:current_user) { nil }
+
+ it_behaves_like 'a mutation that returns a top-level access error'
+ end
+
+ context 'when user has permissions to delete uploads' do
+ let(:current_user) { maintainer }
+
+ it 'deletes the upload' do
+ post_graphql_mutation(mutation, current_user: current_user)
+
+ expect(response).to have_gitlab_http_status(:success)
+ expect(mutation_response['upload']).to include('id' => upload.to_global_id.to_s)
+ expect(mutation_response['errors']).to be_empty
+ end
+
+ context 'when upload does not exist' do
+ let(:params) { { filename: 'invalid', secret: upload.secret }.merge(extra_params) }
+
+ it 'returns an error' do
+ post_graphql_mutation(mutation, current_user: current_user)
+
+ expect(response).to have_gitlab_http_status(:success)
+ expect(mutation_response['upload']).to be_nil
+ expect(mutation_response['errors']).to match_array([
+ "The resource that you are attempting to access does not "\
+ "exist or you don't have permission to perform this action."
+ ])
+ end
+ end
+ end
+ end
+
+ context 'when deleting project upload' do
+ let_it_be_with_reload(:upload) { create(:upload, :issuable_upload, model: project) }
+
+ let(:extra_params) { { project_path: project.full_path } }
+
+ it_behaves_like 'upload deletion'
+ end
+
+ context 'when deleting group upload' do
+ let_it_be_with_reload(:upload) { create(:upload, :namespace_upload, model: group) }
+
+ let(:extra_params) { { group_path: group.full_path } }
+
+ it_behaves_like 'upload deletion'
+ end
+end
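
For orientation, `graphql_mutation(:uploadDelete, params)` in the new spec above builds a plain mutation document from the camel-cased field name and the given arguments. A rough hand-written equivalent, with the input type name assumed rather than taken from the schema, and the field selection inferred from the assertions on mutation_response:

    # Approximation of the document the helper generates; the input type name is assumed.
    upload_delete_document = <<~GRAPHQL
      mutation($input: UploadDeleteInput!) {
        uploadDelete(input: $input) {
          errors
          upload {
            id
          }
        }
      }
    GRAPHQL

    variables = {
      input: {
        projectPath: project.full_path,      # or groupPath for group uploads
        filename: File.basename(upload.path),
        secret: upload.secret
      }
    }
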
diff --git a/spec/requests/api/graphql/mutations/work_items/create_from_task_spec.rb b/spec/requests/api/graphql/mutations/work_items/create_from_task_spec.rb
index b1356bbe6fd..e7f4917ddde 100644
--- a/spec/requests/api/graphql/mutations/work_items/create_from_task_spec.rb
+++ b/spec/requests/api/graphql/mutations/work_items/create_from_task_spec.rb
@@ -7,7 +7,7 @@ RSpec.describe "Create a work item from a task in a work item's description" do
let_it_be(:project) { create(:project) }
let_it_be(:developer) { create(:user).tap { |user| project.add_developer(user) } }
- let_it_be(:work_item, refind: true) { create(:work_item, project: project, description: '- [ ] A task in a list', lock_version: 3) }
+ let_it_be(:work_item, refind: true) { create(:work_item, :confidential, project: project, description: '- [ ] A task in a list', lock_version: 3) }
let(:lock_version) { work_item.lock_version }
let(:input) do
@@ -48,6 +48,7 @@ RSpec.describe "Create a work item from a task in a work item's description" do
expect(created_work_item.issue_type).to eq('task')
expect(created_work_item.work_item_type.base_type).to eq('task')
expect(created_work_item.work_item_parent).to eq(work_item)
+ expect(created_work_item).to be_confidential
expect(mutation_response['workItem']).to include('id' => work_item.to_global_id.to_s)
expect(mutation_response['newWorkItem']).to include('id' => created_work_item.to_global_id.to_s)
end
diff --git a/spec/requests/api/graphql/mutations/work_items/create_spec.rb b/spec/requests/api/graphql/mutations/work_items/create_spec.rb
index 911568bc39f..8233821053f 100644
--- a/spec/requests/api/graphql/mutations/work_items/create_spec.rb
+++ b/spec/requests/api/graphql/mutations/work_items/create_spec.rb
@@ -12,6 +12,7 @@ RSpec.describe 'Create a work item' do
{
'title' => 'new title',
'description' => 'new description',
+ 'confidential' => true,
'workItemTypeId' => WorkItems::Type.default_by_type(:task).to_global_id.to_s
}
end
@@ -38,6 +39,7 @@ RSpec.describe 'Create a work item' do
expect(response).to have_gitlab_http_status(:success)
expect(created_work_item.issue_type).to eq('task')
+ expect(created_work_item).to be_confidential
expect(created_work_item.work_item_type.base_type).to eq('task')
expect(mutation_response['workItem']).to include(
input.except('workItemTypeId').merge(
@@ -127,7 +129,7 @@ RSpec.describe 'Create a work item' do
end
context 'when parent work item is not found' do
- let_it_be(:parent) { build_stubbed(:work_item, id: non_existing_record_id)}
+ let_it_be(:parent) { build_stubbed(:work_item, id: non_existing_record_id) }
it 'returns a top level error' do
post_graphql_mutation(mutation, current_user: current_user)
diff --git a/spec/requests/api/graphql/mutations/work_items/update_spec.rb b/spec/requests/api/graphql/mutations/work_items/update_spec.rb
index 77f7b9bacef..909d6549fa5 100644
--- a/spec/requests/api/graphql/mutations/work_items/update_spec.rb
+++ b/spec/requests/api/graphql/mutations/work_items/update_spec.rb
@@ -34,6 +34,10 @@ RSpec.describe 'Update a work item' do
context 'when user has permissions to update a work item' do
let(:current_user) { developer }
+ it_behaves_like 'has spam protection' do
+ let(:mutation_class) { ::Mutations::WorkItems::Update }
+ end
+
context 'when the work item is open' do
it 'closes and updates the work item' do
expect do
@@ -71,36 +75,48 @@ RSpec.describe 'Update a work item' do
end
end
- context 'when unsupported widget input is sent' do
- let_it_be(:test_case) { create(:work_item_type, :default, :test_case, name: 'some_test_case_name') }
- let_it_be(:work_item) { create(:work_item, work_item_type: test_case, project: project) }
-
- let(:input) do
- {
- 'hierarchyWidget' => {}
+ context 'when updating confidentiality' do
+ let(:fields) do
+ <<~FIELDS
+ workItem {
+ confidential
}
+ errors
+ FIELDS
end
- it_behaves_like 'a mutation that returns top-level errors',
- errors: ["Following widget keys are not supported by some_test_case_name type: [:hierarchy_widget]"]
- end
+ shared_examples 'toggling confidentiality' do
+ it 'successfully updates work item' do
+ expect do
+ post_graphql_mutation(mutation, current_user: current_user)
+ work_item.reload
+ end.to change(work_item, :confidential).from(values[:old]).to(values[:new])
- it_behaves_like 'has spam protection' do
- let(:mutation_class) { ::Mutations::WorkItems::Update }
- end
+ expect(response).to have_gitlab_http_status(:success)
+ expect(mutation_response['workItem']).to include(
+ 'confidential' => values[:new]
+ )
+ end
+ end
- context 'when the work_items feature flag is disabled' do
- before do
- stub_feature_flags(work_items: false)
+ context 'when setting as confidential' do
+ let(:input) { { 'confidential' => true } }
+
+ it_behaves_like 'toggling confidentiality' do
+ let(:values) { { old: false, new: true } }
+ end
end
- it 'does not update the work item and returns and error' do
- expect do
- post_graphql_mutation(mutation, current_user: current_user)
- work_item.reload
- end.to not_change(work_item, :title)
+ context 'when setting as non-confidential' do
+ let(:input) { { 'confidential' => false } }
- expect(mutation_response['errors']).to contain_exactly('`work_items` feature flag disabled for this project')
+ before do
+ work_item.update!(confidential: true)
+ end
+
+ it_behaves_like 'toggling confidentiality' do
+ let(:values) { { old: true, new: false } }
+ end
end
end
@@ -128,26 +144,90 @@ RSpec.describe 'Update a work item' do
end
end
- context 'with weight widget input' do
+ context 'with due and start date widget input' do
+ let(:start_date) { Date.today }
+ let(:due_date) { 1.week.from_now.to_date }
let(:fields) do
<<~FIELDS
- workItem {
- widgets {
- type
- ... on WorkItemWidgetWeight {
- weight
+ workItem {
+ widgets {
+ type
+ ... on WorkItemWidgetStartAndDueDate {
+ startDate
+ dueDate
+ }
}
}
- }
- errors
+ errors
FIELDS
end
- it_behaves_like 'update work item weight widget' do
- let(:new_weight) { 2 }
+ let(:input) do
+ { 'startAndDueDateWidget' => { 'startDate' => start_date.to_s, 'dueDate' => due_date.to_s } }
+ end
- let(:input) do
- { 'weightWidget' => { 'weight' => new_weight } }
+ it 'updates start and due date' do
+ expect do
+ post_graphql_mutation(mutation, current_user: current_user)
+ work_item.reload
+ end.to change(work_item, :start_date).from(nil).to(start_date).and(
+ change(work_item, :due_date).from(nil).to(due_date)
+ )
+
+ expect(response).to have_gitlab_http_status(:success)
+ expect(mutation_response['workItem']['widgets']).to include(
+ {
+ 'startDate' => start_date.to_s,
+ 'dueDate' => due_date.to_s,
+ 'type' => 'START_AND_DUE_DATE'
+ }
+ )
+ end
+
+ context 'when provided input is invalid' do
+ let(:due_date) { 1.week.ago.to_date }
+
+ it 'returns validation errors without the work item' do
+ post_graphql_mutation(mutation, current_user: current_user)
+
+ expect(mutation_response['workItem']).to be_nil
+ expect(mutation_response['errors']).to contain_exactly('Due date must be greater than or equal to start date')
+ end
+ end
+
+ context 'when dates were already set for the work item' do
+ before do
+ work_item.update!(start_date: start_date, due_date: due_date)
+ end
+
+ context 'when updating only start date' do
+ let(:input) do
+ { 'startAndDueDateWidget' => { 'startDate' => nil } }
+ end
+
+ it 'allows setting a single date to null' do
+ expect do
+ post_graphql_mutation(mutation, current_user: current_user)
+ work_item.reload
+ end.to change(work_item, :start_date).from(start_date).to(nil).and(
+ not_change(work_item, :due_date).from(due_date)
+ )
+ end
+ end
+
+ context 'when updating only due date' do
+ let(:input) do
+ { 'startAndDueDateWidget' => { 'dueDate' => nil } }
+ end
+
+ it 'allows setting a single date to null' do
+ expect do
+ post_graphql_mutation(mutation, current_user: current_user)
+ work_item.reload
+ end.to change(work_item, :due_date).from(due_date).to(nil).and(
+ not_change(work_item, :start_date).from(start_date)
+ )
+ end
end
end
end
@@ -179,7 +259,7 @@ RSpec.describe 'Update a work item' do
end
context 'when updating parent' do
- let_it_be(:work_item) { create(:work_item, :task, project: project) }
+ let_it_be(:work_item, reload: true) { create(:work_item, :task, project: project) }
let_it_be(:valid_parent) { create(:work_item, project: project) }
let_it_be(:invalid_parent) { create(:work_item, :task, project: project) }
@@ -346,5 +426,78 @@ RSpec.describe 'Update a work item' do
end
end
end
+
+ context 'when updating assignees' do
+ let(:fields) do
+ <<~FIELDS
+ workItem {
+ widgets {
+ type
+ ... on WorkItemWidgetAssignees {
+ assignees {
+ nodes {
+ id
+ username
+ }
+ }
+ }
+ }
+ }
+ errors
+ FIELDS
+ end
+
+ let(:input) do
+ { 'assigneesWidget' => { 'assigneeIds' => [developer.to_global_id.to_s] } }
+ end
+
+ it 'updates the work item assignee' do
+ expect do
+ post_graphql_mutation(mutation, current_user: current_user)
+ work_item.reload
+ end.to change(work_item, :assignee_ids).from([]).to([developer.id])
+
+ expect(response).to have_gitlab_http_status(:success)
+ expect(mutation_response['workItem']['widgets']).to include(
+ {
+ 'type' => 'ASSIGNEES',
+ 'assignees' => {
+ 'nodes' => [
+ { 'id' => developer.to_global_id.to_s, 'username' => developer.username }
+ ]
+ }
+ }
+ )
+ end
+ end
+
+ context 'when unsupported widget input is sent' do
+ let_it_be(:test_case) { create(:work_item_type, :default, :test_case, name: 'some_test_case_name') }
+ let_it_be(:work_item) { create(:work_item, work_item_type: test_case, project: project) }
+
+ let(:input) do
+ {
+ 'hierarchyWidget' => {}
+ }
+ end
+
+ it_behaves_like 'a mutation that returns top-level errors',
+ errors: ["Following widget keys are not supported by some_test_case_name type: [:hierarchy_widget]"]
+ end
+
+ context 'when the work_items feature flag is disabled' do
+ before do
+ stub_feature_flags(work_items: false)
+ end
+
+    it 'does not update the work item and returns an error' do
+ expect do
+ post_graphql_mutation(mutation, current_user: current_user)
+ work_item.reload
+ end.to not_change(work_item, :title)
+
+ expect(mutation_response['errors']).to contain_exactly('`work_items` feature flag disabled for this project')
+ end
+ end
end
end
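
Several of the new examples above chain matchers with `.and`, expecting one date to change while the other stays put. This compound-matcher pattern works on any block expectation; a tiny generic illustration, assuming the `not_change` negated matcher that GitLab defines in its spec support:

    require 'date'

    # `not_change` is a negated matcher, typically registered once in spec support:
    # RSpec::Matchers.define_negated_matcher :not_change, :change
    record = Struct.new(:start_date, :due_date).new(Date.today, Date.today + 7)

    expect { record.start_date = nil }
      .to change(record, :start_date).to(nil)
      .and not_change(record, :due_date)
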
diff --git a/spec/requests/api/graphql/namespace/root_storage_statistics_spec.rb b/spec/requests/api/graphql/namespace/root_storage_statistics_spec.rb
index 37cc502103d..8d8a0baae36 100644
--- a/spec/requests/api/graphql/namespace/root_storage_statistics_spec.rb
+++ b/spec/requests/api/graphql/namespace/root_storage_statistics_spec.rb
@@ -49,7 +49,7 @@ RSpec.describe 'rendering namespace statistics' do
it_behaves_like 'a working namespace with storage statistics query'
context 'when the namespace is public' do
- let(:group) { create(:group, :public)}
+ let(:group) { create(:group, :public) }
it 'hides statistics for unauthenticated requests' do
post_graphql(query, current_user: nil)
diff --git a/spec/requests/api/graphql/packages/conan_spec.rb b/spec/requests/api/graphql/packages/conan_spec.rb
index 1f3732980d9..5bd5a71bbeb 100644
--- a/spec/requests/api/graphql/packages/conan_spec.rb
+++ b/spec/requests/api/graphql/packages/conan_spec.rb
@@ -8,7 +8,7 @@ RSpec.describe 'conan package details' do
let_it_be(:package) { create(:conan_package, project: project) }
let(:metadata) { query_graphql_fragment('ConanMetadata') }
- let(:package_files_metadata) {query_graphql_fragment('ConanFileMetadata')}
+ let(:package_files_metadata) { query_graphql_fragment('ConanFileMetadata') }
let(:query) do
graphql_query_for(:package, { id: package_global_id }, <<~FIELDS)
diff --git a/spec/requests/api/graphql/packages/helm_spec.rb b/spec/requests/api/graphql/packages/helm_spec.rb
index 397096f70db..1675b8faa23 100644
--- a/spec/requests/api/graphql/packages/helm_spec.rb
+++ b/spec/requests/api/graphql/packages/helm_spec.rb
@@ -7,7 +7,7 @@ RSpec.describe 'helm package details' do
let_it_be(:package) { create(:helm_package, project: project) }
- let(:package_files_metadata) {query_graphql_fragment('HelmFileMetadata')}
+ let(:package_files_metadata) { query_graphql_fragment('HelmFileMetadata') }
let(:query) do
graphql_query_for(:package, { id: package_global_id }, <<~FIELDS)
diff --git a/spec/requests/api/graphql/packages/package_spec.rb b/spec/requests/api/graphql/packages/package_spec.rb
index 0335c1085b4..c28b37db5af 100644
--- a/spec/requests/api/graphql/packages/package_spec.rb
+++ b/spec/requests/api/graphql/packages/package_spec.rb
@@ -18,7 +18,7 @@ RSpec.describe 'package details' do
let(:depth) { 3 }
let(:excluded) { %w[metadata apiFuzzingCiConfiguration pipeline packageFiles] }
let(:metadata) { query_graphql_fragment('ComposerMetadata') }
- let(:package_files) {all_graphql_fields_for('PackageFile')}
+ let(:package_files) { all_graphql_fields_for('PackageFile') }
let(:package_global_id) { global_id_of(composer_package) }
let(:package_details) { graphql_data_at(:package) }
diff --git a/spec/requests/api/graphql/project/base_service_spec.rb b/spec/requests/api/graphql/project/base_service_spec.rb
index 5dc0f55db88..58d10ade8cf 100644
--- a/spec/requests/api/graphql/project/base_service_spec.rb
+++ b/spec/requests/api/graphql/project/base_service_spec.rb
@@ -26,7 +26,7 @@ RSpec.describe 'query Jira service' do
)
end
- let(:services) { graphql_data.dig('project', 'services', 'nodes')}
+ let(:services) { graphql_data.dig('project', 'services', 'nodes') }
it_behaves_like 'unauthorized users cannot read services'
diff --git a/spec/requests/api/graphql/project/error_tracking/sentry_detailed_error_request_spec.rb b/spec/requests/api/graphql/project/error_tracking/sentry_detailed_error_request_spec.rb
index 2b85704f479..2fe5fb593fe 100644
--- a/spec/requests/api/graphql/project/error_tracking/sentry_detailed_error_request_spec.rb
+++ b/spec/requests/api/graphql/project/error_tracking/sentry_detailed_error_request_spec.rb
@@ -34,6 +34,8 @@ RSpec.describe 'getting a detailed sentry error' do
context 'when data is loading via reactive cache' do
before do
+ expect(Gitlab::UsageDataCounters::HLLRedisCounter).not_to receive(:track_event)
+
post_graphql(query, current_user: current_user)
end
@@ -48,6 +50,10 @@ RSpec.describe 'getting a detailed sentry error' do
.to receive(:issue_details)
.and_return({ issue: sentry_detailed_error })
+ expect(Gitlab::UsageDataCounters::HLLRedisCounter)
+ .to receive(:track_event)
+ .with('error_tracking_view_details', values: current_user.id)
+
post_graphql(query, current_user: current_user)
end
diff --git a/spec/requests/api/graphql/project/fork_targets_spec.rb b/spec/requests/api/graphql/project/fork_targets_spec.rb
new file mode 100644
index 00000000000..b21a11ff4dc
--- /dev/null
+++ b/spec/requests/api/graphql/project/fork_targets_spec.rb
@@ -0,0 +1,69 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'getting a list of fork targets for a project' do
+ include GraphqlHelpers
+
+ let_it_be(:group) { create(:group) }
+ let_it_be(:another_group) { create(:group) }
+ let_it_be(:project) { create(:project, :private, group: group) }
+ let_it_be(:user) { create(:user, developer_projects: [project]) }
+
+ let(:current_user) { user }
+ let(:fields) do
+ <<~GRAPHQL
+ forkTargets{
+ nodes { id name fullPath visibility }
+ }
+ GRAPHQL
+ end
+
+ let(:query) do
+ graphql_query_for(
+ 'project',
+ { 'fullPath' => project.full_path },
+ fields
+ )
+ end
+
+ before_all do
+ group.add_owner(user)
+ another_group.add_owner(user)
+ end
+
+ context 'when user has access to the project' do
+ before do
+ post_graphql(query, current_user: current_user)
+ end
+
+ it_behaves_like 'a working graphql query'
+
+ it 'returns fork targets for the project' do
+ expect(graphql_data.dig('project', 'forkTargets', 'nodes')).to match_array(
+ [user.namespace, project.namespace, another_group].map do |target|
+ hash_including(
+ {
+ 'id' => target.to_global_id.to_s,
+ 'name' => target.name,
+ 'fullPath' => target.full_path,
+ 'visibility' => target.visibility
+ }
+ )
+ end
+ )
+ end
+ end
+
+ context "when user doesn't have access to the project" do
+ let(:current_user) { create(:user) }
+
+ before do
+ post_graphql(query, current_user: current_user)
+ end
+
+ it 'does not return the project' do
+ expect(graphql_data).to eq('project' => nil)
+ end
+ end
+end
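
The query in the new fork-targets spec is assembled by graphql_query_for from the field string above; written out by hand it is roughly the following, which helps when reading the graphql_data.dig('project', 'forkTargets', 'nodes') assertions:

    query = <<~GRAPHQL
      query {
        project(fullPath: "#{project.full_path}") {
          forkTargets {
            nodes { id name fullPath visibility }
          }
        }
      }
    GRAPHQL

    post_graphql(query, current_user: current_user)
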
diff --git a/spec/requests/api/graphql/project/jira_import_spec.rb b/spec/requests/api/graphql/project/jira_import_spec.rb
index 98a3f08baa6..202220f4bf6 100644
--- a/spec/requests/api/graphql/project/jira_import_spec.rb
+++ b/spec/requests/api/graphql/project/jira_import_spec.rb
@@ -56,8 +56,8 @@ RSpec.describe 'query Jira import data' do
)
end
- let(:jira_imports) { graphql_data.dig('project', 'jiraImports', 'nodes')}
- let(:jira_import_status) { graphql_data.dig('project', 'jiraImportStatus')}
+ let(:jira_imports) { graphql_data.dig('project', 'jiraImports', 'nodes') }
+ let(:jira_import_status) { graphql_data.dig('project', 'jiraImportStatus') }
context 'when user cannot read Jira import data' do
before do
@@ -89,11 +89,11 @@ RSpec.describe 'query Jira import data' do
context 'list of jira imports sorted ascending by scheduledAt time' do
it 'retuns list of jira imports' do
- jira_proket_keys = jira_imports.map {|ji| ji['jiraProjectKey']}
- usernames = jira_imports.map {|ji| ji.dig('scheduledBy', 'username')}
- imported_issues_count = jira_imports.map {|ji| ji.dig('importedIssuesCount')}
- failed_issues_count = jira_imports.map {|ji| ji.dig('failedToImportCount')}
- total_issue_count = jira_imports.map {|ji| ji.dig('totalIssueCount')}
+ jira_proket_keys = jira_imports.map { |ji| ji['jiraProjectKey'] }
+ usernames = jira_imports.map { |ji| ji.dig('scheduledBy', 'username') }
+ imported_issues_count = jira_imports.map { |ji| ji.dig('importedIssuesCount') }
+ failed_issues_count = jira_imports.map { |ji| ji.dig('failedToImportCount') }
+ total_issue_count = jira_imports.map { |ji| ji.dig('totalIssueCount') }
expect(jira_imports.size).to eq 2
expect(jira_proket_keys).to eq %w(BB AA)
diff --git a/spec/requests/api/graphql/project/project_members_spec.rb b/spec/requests/api/graphql/project/project_members_spec.rb
index 4225c3ad3e8..97a79ab3b0e 100644
--- a/spec/requests/api/graphql/project/project_members_spec.rb
+++ b/spec/requests/api/graphql/project/project_members_spec.rb
@@ -48,24 +48,6 @@ RSpec.describe 'getting project members information' do
expect_array_response(user_2)
end
-
- context 'when the use_keyset_aware_user_search_query FF is off' do
- before do
- stub_feature_flags(use_keyset_aware_user_search_query: false)
- end
-
- it 'raises error on the 2nd page due to missing cursor data' do
- fetch_members(project: parent_project, args: { search: 'Same Name', first: 1 })
-
- # user_2 because the "old" order was undeterministic (insert order), no tie-breaker column
- expect_array_response(user_2)
-
- next_cursor = graphql_data_at(:project, :projectMembers, :pageInfo, :endCursor)
- fetch_members(project: parent_project, args: { search: 'Same Name', first: 1, after: next_cursor })
-
- expect(graphql_errors.first['message']).to include('PG::UndefinedColumn')
- end
- end
end
end
end
diff --git a/spec/requests/api/graphql/project/work_items_spec.rb b/spec/requests/api/graphql/project/work_items_spec.rb
index 66742fcbeb6..6ef28392b8b 100644
--- a/spec/requests/api/graphql/project/work_items_spec.rb
+++ b/spec/requests/api/graphql/project/work_items_spec.rb
@@ -21,7 +21,7 @@ RSpec.describe 'getting an work item list for a project' do
<<~QUERY
edges {
node {
- #{all_graphql_fields_for('workItems'.classify)}
+ #{all_graphql_fields_for('workItems'.classify, max_depth: 2)}
}
}
QUERY
diff --git a/spec/requests/api/graphql/project_query_spec.rb b/spec/requests/api/graphql/project_query_spec.rb
index 310a8e9fa33..d1b990629a1 100644
--- a/spec/requests/api/graphql/project_query_spec.rb
+++ b/spec/requests/api/graphql/project_query_spec.rb
@@ -190,4 +190,100 @@ RSpec.describe 'getting project information' do
end
end
end
+
+ context 'with timelog categories' do
+ let_it_be(:timelog_category) do
+ create(:timelog_category, namespace: project.project_namespace, name: 'TimelogCategoryTest')
+ end
+
+ let(:project_fields) do
+ <<~GQL
+ timelogCategories {
+ nodes {
+ #{all_graphql_fields_for('TimeTrackingTimelogCategory')}
+ }
+ }
+ GQL
+ end
+
+ context 'when user is guest and the project is public' do
+ before do
+ project.update!(visibility_level: Gitlab::VisibilityLevel::PUBLIC)
+ end
+
+ it 'includes empty timelog categories array' do
+ post_graphql(query, current_user: current_user)
+
+ expect(graphql_data_at(:project, :timelogCategories, :nodes)).to match([])
+ end
+ end
+
+ context 'when user has reporter role' do
+ before do
+ project.add_reporter(current_user)
+ end
+
+ it 'returns the timelog category with all its fields' do
+ post_graphql(query, current_user: current_user)
+
+ expect(graphql_data_at(:project, :timelogCategories, :nodes))
+ .to contain_exactly(a_graphql_entity_for(timelog_category))
+ end
+
+ context 'when timelog_categories flag is disabled' do
+ before do
+ stub_feature_flags(timelog_categories: false)
+ end
+
+ it 'returns no timelog categories' do
+ post_graphql(query, current_user: current_user)
+
+ expect(graphql_data_at(:project, :timelogCategories)).to be_nil
+ end
+ end
+ end
+
+ context 'for N+1 queries' do
+ let!(:project1) { create(:project) }
+ let!(:project2) { create(:project) }
+
+ before do
+ project1.add_reporter(current_user)
+ project2.add_reporter(current_user)
+ end
+
+ it 'avoids N+1 database queries' do
+ pending('See: https://gitlab.com/gitlab-org/gitlab/-/issues/369396')
+
+ ctx = { current_user: current_user }
+
+ baseline_query = <<~GQL
+ query {
+ a: project(fullPath: "#{project1.full_path}") { ... p }
+ }
+
+ fragment p on Project {
+ timelogCategories { nodes { name } }
+ }
+ GQL
+
+ query = <<~GQL
+ query {
+ a: project(fullPath: "#{project1.full_path}") { ... p }
+ b: project(fullPath: "#{project2.full_path}") { ... p }
+ }
+
+ fragment p on Project {
+ timelogCategories { nodes { name } }
+ }
+ GQL
+
+ control = ActiveRecord::QueryRecorder.new do
+ run_with_clean_state(baseline_query, context: ctx)
+ end
+
+ expect { run_with_clean_state(query, context: ctx) }.not_to exceed_query_limit(control)
+ end
+ end
+ end
end
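
The pending N+1 example compares recorded SQL statement counts between a one-project baseline and a two-project query. Conceptually, ActiveRecord::QueryRecorder subscribes to sql.active_record notifications and counts the statements; a stripped-down stand-in that conveys the idea (GitLab's recorder additionally filters cached and SCHEMA queries and supports thresholds):

    require 'active_support/notifications'

    # Count SQL statements issued while the block runs (simplified stand-in
    # for ActiveRecord::QueryRecorder used in the spec above).
    def count_sql_queries
      count = 0
      subscriber = ActiveSupport::Notifications.subscribe('sql.active_record') do |*, payload|
        count += 1 unless payload[:name] == 'SCHEMA' || payload[:cached]
      end
      yield
      count
    ensure
      ActiveSupport::Notifications.unsubscribe(subscriber)
    end

    baseline = count_sql_queries { run_with_clean_state(baseline_query, context: ctx) }
    actual   = count_sql_queries { run_with_clean_state(query, context: ctx) }
    expect(actual).to be <= baseline   # no extra queries for the second project
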
diff --git a/spec/requests/api/graphql/work_item_spec.rb b/spec/requests/api/graphql/work_item_spec.rb
index f17d2ebbb7e..34644e5893a 100644
--- a/spec/requests/api/graphql/work_item_spec.rb
+++ b/spec/requests/api/graphql/work_item_spec.rb
@@ -8,7 +8,16 @@ RSpec.describe 'Query.work_item(id)' do
let_it_be(:developer) { create(:user) }
let_it_be(:guest) { create(:user) }
let_it_be(:project) { create(:project, :private) }
- let_it_be(:work_item) { create(:work_item, project: project, description: '- List item', weight: 1) }
+ let_it_be(:work_item) do
+ create(
+ :work_item,
+ project: project,
+ description: '- List item',
+ start_date: Date.today,
+ due_date: 1.week.from_now
+ )
+ end
+
let_it_be(:child_item1) { create(:work_item, :task, project: project) }
let_it_be(:child_item2) { create(:work_item, :task, confidential: true, project: project) }
let_it_be(:child_link1) { create(:parent_link, work_item_parent: work_item, work_item: child_item1) }
@@ -16,7 +25,7 @@ RSpec.describe 'Query.work_item(id)' do
let(:current_user) { developer }
let(:work_item_data) { graphql_data['workItem'] }
- let(:work_item_fields) { all_graphql_fields_for('WorkItem') }
+ let(:work_item_fields) { all_graphql_fields_for('WorkItem', max_depth: 2) }
let(:global_id) { work_item.to_gid.to_s }
let(:query) do
@@ -41,8 +50,10 @@ RSpec.describe 'Query.work_item(id)' do
'lockVersion' => work_item.lock_version,
'state' => "OPEN",
'title' => work_item.title,
+ 'confidential' => work_item.confidential,
'workItemType' => hash_including('id' => work_item.work_item_type.to_gid.to_s),
- 'userPermissions' => { 'readWorkItem' => true, 'updateWorkItem' => true, 'deleteWorkItem' => false }
+ 'userPermissions' => { 'readWorkItem' => true, 'updateWorkItem' => true, 'deleteWorkItem' => false },
+ 'project' => hash_including('id' => project.to_gid.to_s, 'fullPath' => project.full_path)
)
end
@@ -163,14 +174,24 @@ RSpec.describe 'Query.work_item(id)' do
end
end
- describe 'weight widget' do
+ describe 'assignees widget' do
+ let(:assignees) { create_list(:user, 2) }
+ let(:work_item) { create(:work_item, project: project, assignees: assignees) }
+
let(:work_item_fields) do
<<~GRAPHQL
id
widgets {
type
- ... on WorkItemWidgetWeight {
- weight
+ ... on WorkItemWidgetAssignees {
+ allowsMultipleAssignees
+ canInviteMembers
+ assignees {
+ nodes {
+ id
+ username
+ }
+ }
}
}
GRAPHQL
@@ -181,30 +202,34 @@ RSpec.describe 'Query.work_item(id)' do
'id' => work_item.to_gid.to_s,
'widgets' => include(
hash_including(
- 'type' => 'WEIGHT',
- 'weight' => work_item.weight
+ 'type' => 'ASSIGNEES',
+ 'allowsMultipleAssignees' => boolean,
+ 'canInviteMembers' => boolean,
+ 'assignees' => {
+ 'nodes' => match_array(
+ assignees.map { |a| { 'id' => a.to_gid.to_s, 'username' => a.username } }
+ )
+ }
)
)
)
end
end
- describe 'assignees widget' do
- let(:assignees) { create_list(:user, 2) }
- let(:work_item) { create(:work_item, project: project, assignees: assignees) }
+ describe 'labels widget' do
+ let(:labels) { create_list(:label, 2, project: project) }
+ let(:work_item) { create(:work_item, project: project, labels: labels) }
let(:work_item_fields) do
<<~GRAPHQL
id
widgets {
type
- ... on WorkItemWidgetAssignees {
- allowsMultipleAssignees
- canInviteMembers
- assignees {
+ ... on WorkItemWidgetLabels {
+ labels {
nodes {
id
- username
+ title
}
}
}
@@ -217,12 +242,10 @@ RSpec.describe 'Query.work_item(id)' do
'id' => work_item.to_gid.to_s,
'widgets' => include(
hash_including(
- 'type' => 'ASSIGNEES',
- 'allowsMultipleAssignees' => boolean,
- 'canInviteMembers' => boolean,
- 'assignees' => {
+ 'type' => 'LABELS',
+ 'labels' => {
'nodes' => match_array(
- assignees.map { |a| { 'id' => a.to_gid.to_s, 'username' => a.username } }
+ labels.map { |a| { 'id' => a.to_gid.to_s, 'title' => a.title } }
)
}
)
@@ -230,6 +253,34 @@ RSpec.describe 'Query.work_item(id)' do
)
end
end
+
+ describe 'start and due date widget' do
+ let(:work_item_fields) do
+ <<~GRAPHQL
+ id
+ widgets {
+ type
+ ... on WorkItemWidgetStartAndDueDate {
+ startDate
+ dueDate
+ }
+ }
+ GRAPHQL
+ end
+
+ it 'returns widget information' do
+ expect(work_item_data).to include(
+ 'id' => work_item.to_gid.to_s,
+ 'widgets' => include(
+ hash_including(
+ 'type' => 'START_AND_DUE_DATE',
+ 'startDate' => work_item.start_date.to_s,
+ 'dueDate' => work_item.due_date.to_s
+ )
+ )
+ )
+ end
+ end
end
context 'when an Issue Global ID is provided' do
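
Both this file and the work-item list spec above now pass max_depth: 2 to all_graphql_fields_for, which caps how deeply nested selections (widgets, their children, and so on) are expanded and keeps the generated query within complexity limits. A toy illustration of depth-limited field expansion; this is not the helper's real implementation, which walks the GraphQL type metadata:

    # Toy depth-limited expansion of a nested field map (illustrative only).
    def expand_fields(field_map, max_depth:, depth: 1)
      return '' if depth > max_depth

      field_map.map do |name, children|
        if children.is_a?(Hash) && children.any?
          nested = expand_fields(children, max_depth: max_depth, depth: depth + 1)
          nested.empty? ? nil : "#{name} { #{nested} }"
        else
          name.to_s
        end
      end.compact.join(' ')
    end

    fields = { id: nil, widgets: { type: nil, assignees: { nodes: { id: nil, username: nil } } } }
    expand_fields(fields, max_depth: 2)
    # => "id widgets { type }" -- selections deeper than two levels are dropped
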
diff --git a/spec/requests/api/graphql_spec.rb b/spec/requests/api/graphql_spec.rb
index d94257c61eb..1c1ae73ddfe 100644
--- a/spec/requests/api/graphql_spec.rb
+++ b/spec/requests/api/graphql_spec.rb
@@ -463,50 +463,21 @@ RSpec.describe 'GraphQL' do
)
end
- context 'when new_graphql_keyset_pagination feature flag is off' do
- before do
- stub_feature_flags(new_graphql_keyset_pagination: false)
- end
-
- it 'paginates datetimes correctly when they have millisecond data' do
- # let's make sure we're actually querying a timestamp, just in case
- expect(Gitlab::Graphql::Pagination::Keyset::QueryBuilder)
- .to receive(:new).with(anything, anything, hash_including('created_at'), anything).and_call_original
+ it 'paginates datetimes correctly when they have millisecond data' do
+ execute_query
+ first_page = graphql_data
+ edges = first_page.dig(*issues_edges)
+ cursor = first_page.dig(*end_cursor)
- execute_query
- first_page = graphql_data
- edges = first_page.dig(*issues_edges)
- cursor = first_page.dig(*end_cursor)
+ expect(edges.count).to eq(6)
+ expect(edges.last['node']['iid']).to eq(issues[4].iid.to_s)
- expect(edges.count).to eq(6)
- expect(edges.last['node']['iid']).to eq(issues[4].iid.to_s)
+ execute_query(after: cursor)
+ second_page = graphql_data
+ edges = second_page.dig(*issues_edges)
- execute_query(after: cursor)
- second_page = graphql_data
- edges = second_page.dig(*issues_edges)
-
- expect(edges.count).to eq(4)
- expect(edges.last['node']['iid']).to eq(issues[0].iid.to_s)
- end
- end
-
- context 'when new_graphql_keyset_pagination feature flag is on' do
- it 'paginates datetimes correctly when they have millisecond data' do
- execute_query
- first_page = graphql_data
- edges = first_page.dig(*issues_edges)
- cursor = first_page.dig(*end_cursor)
-
- expect(edges.count).to eq(6)
- expect(edges.last['node']['iid']).to eq(issues[4].iid.to_s)
-
- execute_query(after: cursor)
- second_page = graphql_data
- edges = second_page.dig(*issues_edges)
-
- expect(edges.count).to eq(4)
- expect(edges.last['node']['iid']).to eq(issues[0].iid.to_s)
- end
+ expect(edges.count).to eq(4)
+ expect(edges.last['node']['iid']).to eq(issues[0].iid.to_s)
end
end
end
diff --git a/spec/requests/api/group_variables_spec.rb b/spec/requests/api/group_variables_spec.rb
index a7b4bea362f..4fed7dd7624 100644
--- a/spec/requests/api/group_variables_spec.rb
+++ b/spec/requests/api/group_variables_spec.rb
@@ -91,7 +91,7 @@ RSpec.describe API::GroupVariables do
it 'creates variable' do
expect do
post api("/groups/#{group.id}/variables", user), params: { key: 'TEST_VARIABLE_2', value: 'PROTECTED_VALUE_2', protected: true, masked: true }
- end.to change {group.variables.count}.by(1)
+ end.to change { group.variables.count }.by(1)
expect(response).to have_gitlab_http_status(:created)
expect(json_response['key']).to eq('TEST_VARIABLE_2')
@@ -105,7 +105,7 @@ RSpec.describe API::GroupVariables do
it 'creates variable with optional attributes' do
expect do
post api("/groups/#{group.id}/variables", user), params: { variable_type: 'file', key: 'TEST_VARIABLE_2', value: 'VALUE_2' }
- end.to change {group.variables.count}.by(1)
+ end.to change { group.variables.count }.by(1)
expect(response).to have_gitlab_http_status(:created)
expect(json_response['key']).to eq('TEST_VARIABLE_2')
@@ -119,7 +119,7 @@ RSpec.describe API::GroupVariables do
it 'does not allow to duplicate variable key' do
expect do
post api("/groups/#{group.id}/variables", user), params: { key: variable.key, value: 'VALUE_2' }
- end.to change {group.variables.count}.by(0)
+ end.to change { group.variables.count }.by(0)
expect(response).to have_gitlab_http_status(:bad_request)
end
@@ -207,7 +207,7 @@ RSpec.describe API::GroupVariables do
delete api("/groups/#{group.id}/variables/#{variable.key}", user)
expect(response).to have_gitlab_http_status(:no_content)
- end.to change {group.variables.count}.by(-1)
+ end.to change { group.variables.count }.by(-1)
end
it 'responds with 404 Not Found if requesting non-existing variable' do
diff --git a/spec/requests/api/groups_spec.rb b/spec/requests/api/groups_spec.rb
index 3bc3cce5310..bc37f8e4655 100644
--- a/spec/requests/api/groups_spec.rb
+++ b/spec/requests/api/groups_spec.rb
@@ -864,7 +864,7 @@ RSpec.describe API::Groups do
end
describe 'PUT /groups/:id' do
- let(:new_group_name) { 'New Group'}
+ let(:new_group_name) { 'New Group' }
let(:file_path) { 'spec/fixtures/dk.png' }
it_behaves_like 'group avatar upload' do
diff --git a/spec/requests/api/helpers_spec.rb b/spec/requests/api/helpers_spec.rb
index 8961f3177b6..e29e5c31a34 100644
--- a/spec/requests/api/helpers_spec.rb
+++ b/spec/requests/api/helpers_spec.rb
@@ -26,8 +26,8 @@ RSpec.describe API::Helpers do
}
end
- let(:header) { }
- let(:request) { Grape::Request.new(env)}
+ let(:header) {}
+ let(:request) { Grape::Request.new(env) }
let(:params) { request.params }
before do
@@ -539,7 +539,7 @@ RSpec.describe API::Helpers do
let(:token) { create(:oauth_access_token) }
before do
- env['HTTP_AUTHORIZATION'] = "Bearer #{token.token}"
+ env['HTTP_AUTHORIZATION'] = "Bearer #{token.plaintext_token}"
end
it_behaves_like 'sudo'
diff --git a/spec/requests/api/integrations_spec.rb b/spec/requests/api/integrations_spec.rb
index b2db7f7caef..1e8061f9606 100644
--- a/spec/requests/api/integrations_spec.rb
+++ b/spec/requests/api/integrations_spec.rb
@@ -66,6 +66,7 @@ RSpec.describe API::Integrations do
mattermost: %i[deployment_channel labels_to_be_notified],
mock_ci: %i[enable_ssl_verification],
prometheus: %i[manual_configuration],
+ pumble: %i[branches_to_be_notified notify_only_broken_pipelines],
slack: %i[alert_events alert_channel deployment_channel labels_to_be_notified],
unify_circuit: %i[branches_to_be_notified notify_only_broken_pipelines],
webex_teams: %i[branches_to_be_notified notify_only_broken_pipelines]
diff --git a/spec/requests/api/internal/base_spec.rb b/spec/requests/api/internal/base_spec.rb
index acfe476a864..e100684018a 100644
--- a/spec/requests/api/internal/base_spec.rb
+++ b/spec/requests/api/internal/base_spec.rb
@@ -376,10 +376,17 @@ RSpec.describe API::Internal::Base do
shared_examples 'rate limited request' do
let(:action) { 'git-upload-pack' }
let(:actor) { key }
+ let(:rate_limiter) { double(:rate_limiter, ip: "127.0.0.1", trusted_ip?: false) }
+
+ before do
+ allow(::Gitlab::Auth::IpRateLimiter).to receive(:new).with("127.0.0.1").and_return(rate_limiter)
+ end
it 'is throttled by rate limiter' do
allow(::Gitlab::ApplicationRateLimiter).to receive(:threshold).and_return(1)
+
expect(::Gitlab::ApplicationRateLimiter).to receive(:throttled?).with(:gitlab_shell_operation, scope: [action, project.full_path, actor]).twice.and_call_original
+ expect(::Gitlab::ApplicationRateLimiter).to receive(:throttled?).with(:gitlab_shell_operation, scope: [action, project.full_path, "127.0.0.1"]).and_call_original
request
@@ -402,6 +409,28 @@ RSpec.describe API::Internal::Base do
subject
end
end
+
+ context 'when rate_limit_gitlab_shell_by_ip feature flag is disabled' do
+ before do
+ stub_feature_flags(rate_limit_gitlab_shell_by_ip: false)
+ end
+
+ it 'is not throttled by rate limiter' do
+ expect(::Gitlab::ApplicationRateLimiter).not_to receive(:throttled?)
+
+ subject
+ end
+ end
+
+ context 'when the IP is in a trusted range' do
+ let(:rate_limiter) { double(:rate_limiter, ip: "127.0.0.1", trusted_ip?: true) }
+
+ it 'is not throttled by rate limiter' do
+ expect(::Gitlab::ApplicationRateLimiter).not_to receive(:throttled?)
+
+ subject
+ end
+ end
end
context "access granted" do
@@ -1451,7 +1480,7 @@ RSpec.describe API::Internal::Base do
describe 'POST /internal/two_factor_otp_check' do
let(:key_id) { key.id }
- let(:otp) { '123456'}
+ let(:otp) { '123456' }
subject do
post api('/internal/two_factor_otp_check'),
@@ -1472,7 +1501,7 @@ RSpec.describe API::Internal::Base do
describe 'POST /internal/two_factor_manual_otp_check' do
let(:key_id) { key.id }
- let(:otp) { '123456'}
+ let(:otp) { '123456' }
subject do
post api('/internal/two_factor_manual_otp_check'),
@@ -1493,7 +1522,7 @@ RSpec.describe API::Internal::Base do
describe 'POST /internal/two_factor_push_otp_check' do
let(:key_id) { key.id }
- let(:otp) { '123456'}
+ let(:otp) { '123456' }
subject do
post api('/internal/two_factor_push_otp_check'),
@@ -1514,7 +1543,7 @@ RSpec.describe API::Internal::Base do
describe 'POST /internal/two_factor_manual_otp_check' do
let(:key_id) { key.id }
- let(:otp) { '123456'}
+ let(:otp) { '123456' }
subject do
post api('/internal/two_factor_manual_otp_check'),
@@ -1534,7 +1563,7 @@ RSpec.describe API::Internal::Base do
describe 'POST /internal/two_factor_push_otp_check' do
let(:key_id) { key.id }
- let(:otp) { '123456'}
+ let(:otp) { '123456' }
subject do
post api('/internal/two_factor_push_otp_check'),
diff --git a/spec/requests/api/internal/error_tracking_spec.rb b/spec/requests/api/internal/error_tracking_spec.rb
index 69eb54d5ed2..4c420eb8505 100644
--- a/spec/requests/api/internal/error_tracking_spec.rb
+++ b/spec/requests/api/internal/error_tracking_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe API::Internal::ErrorTracking do
let(:secret_token) { Gitlab::CurrentSettings.error_tracking_access_token }
let(:headers) do
- { ::API::Internal::ErrorTracking::GITLAB_ERROR_TRACKING_TOKEN_HEADER => Base64.encode64(secret_token) }
+ { ::API::Internal::ErrorTracking::GITLAB_ERROR_TRACKING_TOKEN_HEADER => secret_token }
end
describe 'GET /internal/error_tracking/allowed' do
diff --git a/spec/requests/api/internal/kubernetes_spec.rb b/spec/requests/api/internal/kubernetes_spec.rb
index c0a979995c9..67d8a18dfd8 100644
--- a/spec/requests/api/internal/kubernetes_spec.rb
+++ b/spec/requests/api/internal/kubernetes_spec.rb
@@ -59,7 +59,7 @@ RSpec.describe API::Internal::Kubernetes do
end
end
- describe 'POST /internal/kubernetes/usage_metrics' do
+ describe 'POST /internal/kubernetes/usage_metrics', :clean_gitlab_redis_shared_state do
def send_request(headers: {}, params: {})
post api('/internal/kubernetes/usage_metrics'), params: params, headers: headers.reverse_merge(jwt_auth_headers)
end
@@ -69,29 +69,102 @@ RSpec.describe API::Internal::Kubernetes do
context 'is authenticated for an agent' do
let!(:agent_token) { create(:cluster_agent_token) }
+      # TODO: Remove gitops_sync_count and k8s_api_proxy_request_count in the next milestone
+ # https://gitlab.com/gitlab-org/gitlab/-/issues/369489
+ # We're only keeping it for backwards compatibility until KAS is released
+ # using `counts:` instead
+ context 'deprecated events' do
+ it 'returns no_content for valid events' do
+ send_request(params: { gitops_sync_count: 10, k8s_api_proxy_request_count: 5 })
+
+ expect(response).to have_gitlab_http_status(:no_content)
+ end
+
+ it 'returns no_content for counts of zero' do
+ send_request(params: { gitops_sync_count: 0, k8s_api_proxy_request_count: 0 })
+
+ expect(response).to have_gitlab_http_status(:no_content)
+ end
+
+ it 'returns 400 for non number' do
+ send_request(params: { gitops_sync_count: 'string', k8s_api_proxy_request_count: 1 })
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ end
+
+ it 'returns 400 for negative number' do
+ send_request(params: { gitops_sync_count: -1, k8s_api_proxy_request_count: 1 })
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ end
+
+ it 'tracks events' do
+ counters = { gitops_sync_count: 10, k8s_api_proxy_request_count: 5 }
+ expected_counters = {
+ kubernetes_agent_gitops_sync: counters[:gitops_sync_count],
+ kubernetes_agent_k8s_api_proxy_request: counters[:k8s_api_proxy_request_count]
+ }
+
+ send_request(params: counters)
+
+ expect(Gitlab::UsageDataCounters::KubernetesAgentCounter.totals).to eq(expected_counters)
+ end
+ end
+
it 'returns no_content for valid events' do
- send_request(params: { gitops_sync_count: 10, k8s_api_proxy_request_count: 5 })
+ counters = { gitops_sync: 10, k8s_api_proxy_request: 5 }
+ unique_counters = { agent_users_using_ci_tunnel: [10] }
+
+ send_request(params: { counters: counters, unique_counters: unique_counters })
expect(response).to have_gitlab_http_status(:no_content)
end
it 'returns no_content for counts of zero' do
- send_request(params: { gitops_sync_count: 0, k8s_api_proxy_request_count: 0 })
+ counters = { gitops_sync: 0, k8s_api_proxy_request: 0 }
+ unique_counters = { agent_users_using_ci_tunnel: [] }
+
+ send_request(params: { counters: counters, unique_counters: unique_counters })
expect(response).to have_gitlab_http_status(:no_content)
end
- it 'returns 400 for non number' do
- send_request(params: { gitops_sync_count: 'string', k8s_api_proxy_request_count: 1 })
+    it 'returns 400 when a counter is not a number' do
+ counters = { gitops_sync: 'string', k8s_api_proxy_request: 0 }
+
+ send_request(params: { counters: counters })
expect(response).to have_gitlab_http_status(:bad_request)
end
- it 'returns 400 for negative number' do
- send_request(params: { gitops_sync_count: -1, k8s_api_proxy_request_count: 1 })
+    it 'returns 400 when a unique counter is not an array' do
+ unique_counters = { agent_users_using_ci_tunnel: 1 }
+
+ send_request(params: { unique_counters: unique_counters })
expect(response).to have_gitlab_http_status(:bad_request)
end
+
+ it 'tracks events' do
+ counters = { gitops_sync: 10, k8s_api_proxy_request: 5 }
+ unique_counters = { agent_users_using_ci_tunnel: [10] }
+ expected_counters = {
+ kubernetes_agent_gitops_sync: counters[:gitops_sync],
+ kubernetes_agent_k8s_api_proxy_request: counters[:k8s_api_proxy_request]
+ }
+
+ send_request(params: { counters: counters, unique_counters: unique_counters })
+
+ expect(Gitlab::UsageDataCounters::KubernetesAgentCounter.totals).to eq(expected_counters)
+
+ expect(
+ Gitlab::UsageDataCounters::HLLRedisCounter
+ .unique_events(
+ event_names: 'agent_users_using_ci_tunnel',
+ start_date: Date.current, end_date: Date.current + 10
+ )
+ ).to eq(1)
+ end
end
end
@@ -180,4 +253,95 @@ RSpec.describe API::Internal::Kubernetes do
end
end
end
+
+ describe 'GET /internal/kubernetes/project_info' do
+ def send_request(headers: {}, params: {})
+ get api('/internal/kubernetes/project_info'), params: params, headers: headers.reverse_merge(jwt_auth_headers)
+ end
+
+ include_examples 'authorization'
+ include_examples 'agent authentication'
+
+ context 'an agent is found' do
+ let_it_be(:agent_token) { create(:cluster_agent_token) }
+
+ shared_examples 'agent token tracking'
+
+ context 'project is public' do
+ let(:project) { create(:project, :public) }
+
+ it 'returns expected data', :aggregate_failures do
+ send_request(params: { id: project.id }, headers: { 'Authorization' => "Bearer #{agent_token.token}" })
+
+ expect(response).to have_gitlab_http_status(:success)
+
+ expect(json_response).to match(
+ a_hash_including(
+ 'project_id' => project.id,
+ 'gitaly_info' => a_hash_including(
+ 'address' => match(/\.socket$/),
+ 'token' => 'secret',
+ 'features' => {}
+ ),
+ 'gitaly_repository' => a_hash_including(
+ 'storage_name' => project.repository_storage,
+ 'relative_path' => project.disk_path + '.git',
+ 'gl_repository' => "project-#{project.id}",
+ 'gl_project_path' => project.full_path
+ ),
+ 'default_branch' => project.default_branch_or_main
+ )
+ )
+ end
+
+ context 'repository is for project members only' do
+ let(:project) { create(:project, :public, :repository_private) }
+
+ it 'returns 404' do
+ send_request(params: { id: project.id }, headers: { 'Authorization' => "Bearer #{agent_token.token}" })
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+ end
+
+ context 'project is private' do
+ let(:project) { create(:project, :private) }
+
+ it 'returns 404' do
+ send_request(params: { id: project.id }, headers: { 'Authorization' => "Bearer #{agent_token.token}" })
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+
+ context 'and agent belongs to project' do
+ let(:agent_token) { create(:cluster_agent_token, agent: create(:cluster_agent, project: project)) }
+
+ it 'returns 200' do
+ send_request(params: { id: project.id }, headers: { 'Authorization' => "Bearer #{agent_token.token}" })
+
+ expect(response).to have_gitlab_http_status(:success)
+ end
+ end
+ end
+
+ context 'project is internal' do
+ let(:project) { create(:project, :internal) }
+
+ it 'returns 404' do
+ send_request(params: { id: project.id }, headers: { 'Authorization' => "Bearer #{agent_token.token}" })
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+
+ context 'project does not exist' do
+ it 'returns 404' do
+ send_request(params: { id: non_existing_record_id }, headers: { 'Authorization' => "Bearer #{agent_token.token}" })
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+ end
+ end
end
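
To summarise the API change exercised above: the usage_metrics endpoint now accepts a new payload shape alongside the deprecated top-level *_count fields, with plain counters incremented through KubernetesAgentCounter and per-user sets tracked through HLLRedisCounter. Roughly:

    # Deprecated shape, still accepted until KAS switches over:
    legacy_params = { gitops_sync_count: 10, k8s_api_proxy_request_count: 5 }

    # New shape: event counters plus arrays of unique user ids.
    new_params = {
      counters:        { gitops_sync: 10, k8s_api_proxy_request: 5 },
      unique_counters: { agent_users_using_ci_tunnel: [10] }
    }

    send_request(params: legacy_params)  # 204 No Content, counters bumped
    send_request(params: new_params)     # 204 No Content, plus HLL unique-user tracking
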
diff --git a/spec/requests/api/internal/workhorse_spec.rb b/spec/requests/api/internal/workhorse_spec.rb
index d40c14cc0fd..bcf63bf7c2f 100644
--- a/spec/requests/api/internal/workhorse_spec.rb
+++ b/spec/requests/api/internal/workhorse_spec.rb
@@ -32,6 +32,7 @@ RSpec.describe API::Internal::Workhorse, :allow_forgery_protection do
end
it { expect_status(:success) }
+
it 'returns the temp upload path' do
subject
expect(json_response['TempPath']).to eq(Rails.root.join('tmp/tests/public/uploads/tmp').to_s)
diff --git a/spec/requests/api/invitations_spec.rb b/spec/requests/api/invitations_spec.rb
index cb351635081..a795b49c44e 100644
--- a/spec/requests/api/invitations_spec.rb
+++ b/spec/requests/api/invitations_spec.rb
@@ -447,7 +447,7 @@ RSpec.describe API::Invitations do
emails = 'email3@example.com,email4@example.com,email5@example.com,email6@example.com,email7@example.com'
- unresolved_n_plus_ones = 32 # currently there are 8 queries added per email
+ unresolved_n_plus_ones = 36 # currently there are 9 queries added per email
expect do
post invitations_url(group, maintainer), params: { email: emails, access_level: Member::DEVELOPER }
diff --git a/spec/requests/api/issue_links_spec.rb b/spec/requests/api/issue_links_spec.rb
index 90238c8bf76..98f72f22cdc 100644
--- a/spec/requests/api/issue_links_spec.rb
+++ b/spec/requests/api/issue_links_spec.rb
@@ -162,12 +162,29 @@ RSpec.describe API::IssueLinks do
end
context 'when unauthenticated' do
- it 'returns 401' do
- issue_link = create(:issue_link)
+ context 'when accessing an issue of a private project' do
+ it 'returns 401' do
+ issue_link = create(:issue_link)
- perform_request(issue_link.id)
+ perform_request(issue_link.id)
- expect(response).to have_gitlab_http_status(:unauthorized)
+ expect(response).to have_gitlab_http_status(:unauthorized)
+ end
+ end
+
+ # This isn't ideal, see https://gitlab.com/gitlab-org/gitlab/-/issues/364077
+ context 'when accessing an issue of a public project' do
+ let(:project) { create(:project, :public) }
+ let(:issue) { create(:issue, project: project) }
+ let(:public_issue) { create(:issue, project: project) }
+
+ it 'returns 401' do
+ issue_link = create(:issue_link, source: issue, target: public_issue)
+
+ perform_request(issue_link.id)
+
+ expect(response).to have_gitlab_http_status(:unauthorized)
+ end
end
end
diff --git a/spec/requests/api/issues/get_project_issues_spec.rb b/spec/requests/api/issues/get_project_issues_spec.rb
index 346f8975835..ec6cc060c83 100644
--- a/spec/requests/api/issues/get_project_issues_spec.rb
+++ b/spec/requests/api/issues/get_project_issues_spec.rb
@@ -9,6 +9,8 @@ RSpec.describe API::Issues do
create(:project, :public, :repository, creator_id: user.id, namespace: user.namespace, merge_requests_access_level: ProjectFeature::PRIVATE)
end
+ let_it_be(:group) { create(:group, :public) }
+
let(:user2) { create(:user) }
let(:non_member) { create(:user) }
let_it_be(:guest) { create(:user) }
@@ -85,6 +87,8 @@ RSpec.describe API::Issues do
end
before_all do
+ group.add_reporter(user)
+ group.add_guest(guest)
project.add_reporter(user)
project.add_guest(guest)
private_mrs_project.add_reporter(user)
@@ -107,6 +111,22 @@ RSpec.describe API::Issues do
end
end
+ shared_examples 'returns project issues without confidential issues for guests' do
+ specify do
+ get api(api_url, guest)
+
+ expect_paginated_array_response_contain_exactly(open_issue.id, closed_issue.id)
+ end
+ end
+
+ shared_examples 'returns all project issues for reporters' do
+ specify do
+ get api(api_url, user)
+
+ expect_paginated_array_response_contain_exactly(open_issue.id, confidential_issue.id, closed_issue.id)
+ end
+ end
+
describe "GET /projects/:id/issues" do
let(:base_url) { "/projects/#{project.id}" }
@@ -183,6 +203,30 @@ RSpec.describe API::Issues do
end
end
+ context 'when user is an inherited member from the group' do
+ let!(:open_issue) { create(:issue, project: group_project) }
+ let!(:confidential_issue) { create(:issue, :confidential, project: group_project) }
+ let!(:closed_issue) { create(:issue, state: :closed, project: group_project) }
+
+ let!(:api_url) { "/projects/#{group_project.id}/issues" }
+
+ context 'and group project is public and issues are private' do
+ let_it_be(:group_project) do
+ create(:project, :public, issues_access_level: ProjectFeature::PRIVATE, group: group)
+ end
+
+ it_behaves_like 'returns project issues without confidential issues for guests'
+ it_behaves_like 'returns all project issues for reporters'
+ end
+
+ context 'and group project is private' do
+ let_it_be(:group_project) { create(:project, :private, group: group) }
+
+ it_behaves_like 'returns project issues without confidential issues for guests'
+ it_behaves_like 'returns all project issues for reporters'
+ end
+ end
+
it 'avoids N+1 queries' do
get api("/projects/#{project.id}/issues", user)
diff --git a/spec/requests/api/markdown_spec.rb b/spec/requests/api/markdown_spec.rb
index 47e1f007daa..3e702b05bc9 100644
--- a/spec/requests/api/markdown_spec.rb
+++ b/spec/requests/api/markdown_spec.rb
@@ -5,9 +5,11 @@ require "spec_helper"
RSpec.describe API::Markdown do
describe "POST /markdown" do
let(:user) {} # No-op. It gets overwritten in the contexts below.
+ let(:disable_authenticate_markdown_api) { false }
before do
stub_commonmark_sourcepos_disabled
+ stub_feature_flags(authenticate_markdown_api: false) if disable_authenticate_markdown_api
post api("/markdown", user), params: params
end
@@ -21,27 +23,53 @@ RSpec.describe API::Markdown do
end
end
- shared_examples "404 Project Not Found" do
- it "responses with 404 Not Found" do
+ shared_examples '404 Project Not Found' do
+ it 'responds with 404 Not Found' do
expect(response).to have_gitlab_http_status(:not_found)
expect(response.headers["Content-Type"]).to eq("application/json")
expect(json_response).to be_a(Hash)
- expect(json_response["message"]).to eq("404 Project Not Found")
+ expect(json_response['message']).to eq('404 Project Not Found')
end
end
- context "when arguments are invalid" do
- context "when text is missing" do
- let(:params) { {} }
+ shared_examples '400 Bad Request' do
+ it 'responds with 400 Bad Request' do
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(response.headers['Content-Type']).to eq('application/json')
+ expect(json_response).to be_a(Hash)
+ expect(json_response['error']).to eq('text is missing')
+ end
+ end
+
+ context 'when not logged in' do
+ let(:user) {}
+ let(:params) { {} }
- it "responses with 400 Bad Request" do
- expect(response).to have_gitlab_http_status(:bad_request)
- expect(response.headers["Content-Type"]).to eq("application/json")
+ context 'and authenticate_markdown_api turned on' do
+      it 'responds with 401 Unauthorized' do
+ expect(response).to have_gitlab_http_status(:unauthorized)
+ expect(response.headers['Content-Type']).to eq('application/json')
expect(json_response).to be_a(Hash)
- expect(json_response["error"]).to eq("text is missing")
+ expect(json_response['message']).to eq('401 Unauthorized')
end
end
+ context 'and authenticate_markdown_api turned off' do
+ let(:disable_authenticate_markdown_api) { true }
+
+ it_behaves_like '400 Bad Request'
+ end
+ end
+
+ context 'when arguments are invalid' do
+ let(:user) { create(:user) }
+
+ context 'when text is missing' do
+ let(:params) { {} }
+
+ it_behaves_like '400 Bad Request'
+ end
+
context "when project is not found" do
let(:params) { { text: "Hello world!", gfm: true, project: "Dummy project" } }
@@ -53,6 +81,7 @@ RSpec.describe API::Markdown do
let_it_be(:project) { create(:project) }
let_it_be(:issue) { create(:issue, project: project) }
+ let(:user) { create(:user) }
let(:issue_url) { "http://#{Gitlab.config.gitlab.host}/#{issue.project.namespace.path}/#{issue.project.path}/-/issues/#{issue.iid}" }
let(:text) { ":tada: Hello world! :100: #{issue.to_reference}" }
@@ -131,14 +160,13 @@ RSpec.describe API::Markdown do
end
context 'when not logged in' do
- let(:user) { }
+ let(:user) {}
+ let(:disable_authenticate_markdown_api) { true }
it_behaves_like 'user without proper access'
end
context 'when logged in as user without access' do
- let(:user) { create(:user) }
-
it_behaves_like 'user without proper access'
end
@@ -175,8 +203,9 @@ RSpec.describe API::Markdown do
end
end
- context 'when not logged in' do
- let(:user) { }
+ context 'when not logged in and authenticate_markdown_api turned off' do
+ let(:user) {}
+ let(:disable_authenticate_markdown_api) { true }
it_behaves_like 'user without proper access'
end
diff --git a/spec/requests/api/maven_packages_spec.rb b/spec/requests/api/maven_packages_spec.rb
index ba82d2facc6..1b378788b6a 100644
--- a/spec/requests/api/maven_packages_spec.rb
+++ b/spec/requests/api/maven_packages_spec.rb
@@ -34,7 +34,7 @@ RSpec.describe API::MavenPackages do
end
let(:version) { '1.0-SNAPSHOT' }
- let(:param_path) { "#{package_name}/#{version}"}
+ let(:param_path) { "#{package_name}/#{version}" }
before do
project.add_developer(user)
@@ -1000,20 +1000,45 @@ RSpec.describe API::MavenPackages do
context 'for sha1 file' do
let(:dummy_package) { double(Packages::Package) }
+ let(:file_upload) { fixture_file_upload('spec/fixtures/packages/maven/my-app-1.0-20180724.124855-1.pom.sha1') }
+ let(:stored_sha1) { File.read(file_upload.path) }
- it 'checks the sha1' do
+ subject(:upload) { upload_file_with_token(params: params, file_extension: 'pom.sha1') }
+
+ before do
# The sha verification done by the maven api is between:
# - the sha256 set by workhorse helpers
# - the sha256 of the sha1 of the uploaded package file
# We're going to send `file_upload` for the sha1 and stub the sha1 of the package file so that
# both sha256 values are the same
- expect(::Packages::PackageFileFinder).to receive(:new).and_return(double(execute!: dummy_package))
- expect(dummy_package).to receive(:file_sha1).and_return(File.read(file_upload.path))
+ allow(::Packages::PackageFileFinder).to receive(:new).and_return(double(execute!: dummy_package))
+ allow(dummy_package).to receive(:file_sha1).and_return(stored_sha1)
+ end
- upload_file_with_token(params: params, file_extension: 'jar.sha1')
+ it 'returns no content' do
+ upload
expect(response).to have_gitlab_http_status(:no_content)
end
+
+ context 'when the stored sha1 is not the same' do
+ let(:sent_sha1) { File.read(file_upload.path) }
+ let(:stored_sha1) { 'wrong sha1' }
+
+ it 'logs an error and returns conflict' do
+ expect(Gitlab::ErrorTracking).to receive(:log_exception).with(
+ instance_of(ArgumentError),
+ message: 'maven package file sha1 conflict',
+ stored_sha1: stored_sha1,
+ received_sha256: Digest::SHA256.hexdigest(sent_sha1),
+ sha256_hexdigest_of_stored_sha1: Digest::SHA256.hexdigest(stored_sha1)
+ )
+
+ upload
+
+ expect(response).to have_gitlab_http_status(:conflict)
+ end
+ end
end
context 'for md5 file' do
diff --git a/spec/requests/api/members_spec.rb b/spec/requests/api/members_spec.rb
index e4c2f17af47..9df9c75b020 100644
--- a/spec/requests/api/members_spec.rb
+++ b/spec/requests/api/members_spec.rb
@@ -3,14 +3,14 @@
require 'spec_helper'
RSpec.describe API::Members do
- let(:maintainer) { create(:user, username: 'maintainer_user') }
- let(:maintainer2) { create(:user, username: 'user-with-maintainer-role') }
- let(:developer) { create(:user) }
- let(:access_requester) { create(:user) }
- let(:stranger) { create(:user) }
- let(:user_with_minimal_access) { create(:user) }
-
- let(:project) do
+ let_it_be(:maintainer) { create(:user, username: 'maintainer_user') }
+ let_it_be(:maintainer2) { create(:user, username: 'user-with-maintainer-role') }
+ let_it_be(:developer) { create(:user) }
+ let_it_be(:access_requester) { create(:user) }
+ let_it_be(:stranger) { create(:user) }
+ let_it_be(:user_with_minimal_access) { create(:user) }
+
+ let_it_be(:project, refind: true) do
create(:project, :public, creator_id: maintainer.id, group: create(:group, :public)) do |project|
project.add_maintainer(maintainer)
project.add_developer(developer, current_user: maintainer)
@@ -18,7 +18,7 @@ RSpec.describe API::Members do
end
end
- let!(:group) do
+ let_it_be(:group, refind: true) do
create(:group, :public) do |group|
group.add_owner(maintainer)
group.add_developer(developer, maintainer)
@@ -187,8 +187,8 @@ RSpec.describe API::Members do
end
context 'with a subgroup' do
- let(:group) { create(:group, :private)}
- let(:subgroup) { create(:group, :private, parent: group)}
+ let(:group) { create(:group, :private) }
+ let(:subgroup) { create(:group, :private, parent: group) }
let(:project) { create(:project, group: subgroup) }
before do
@@ -231,6 +231,33 @@ RSpec.describe API::Members do
end
end
end
+
+ context 'with ancestral membership' do
+ shared_examples 'response with correct access levels' do
+ it do
+ get api("/#{source_type.pluralize}/#{source.id}/members/#{all ? 'all/' : ''}#{developer.id}", developer)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['access_level']).to eq(Member::MAINTAINER)
+ end
+ end
+
+ before do
+ source.add_maintainer(developer)
+ end
+
+ include_examples 'response with correct access levels'
+
+ context 'having email invite' do
+ before do
+ Member
+ .find_by(source: group, user: developer)
+ .update!(invite_email: 'email@email.com')
+ end
+
+ include_examples 'response with correct access levels'
+ end
+ end
end
end
diff --git a/spec/requests/api/merge_requests_spec.rb b/spec/requests/api/merge_requests_spec.rb
index 695c0ed1749..2a03ae89389 100644
--- a/spec/requests/api/merge_requests_spec.rb
+++ b/spec/requests/api/merge_requests_spec.rb
@@ -88,7 +88,7 @@ RSpec.describe API::MergeRequests do
context 'with merge status recheck projection' do
it 'checks mergeability asynchronously' do
- expect_next_instance_of(check_service_class) do |service|
+ expect_next_instances_of(check_service_class, (1..2)) do |service|
expect(service).not_to receive(:execute)
expect(service).to receive(:async_execute).and_call_original
end
@@ -595,6 +595,22 @@ RSpec.describe API::MergeRequests do
end
end
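+ # These shared examples capture the caching behaviour: a cache miss serializes the
+ # merge request through MergeRequestBasic, a cache hit skips serialization entirely.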
+ RSpec.shared_examples 'a non-cached MergeRequest api request' do |call_count|
+ it 'serializes merge request' do
+ expect(API::Entities::MergeRequestBasic).to receive(:represent).exactly(call_count).times.and_call_original
+
+ get api(endpoint_path)
+ end
+ end
+
+ RSpec.shared_examples 'a cached MergeRequest api request' do
+ it 'does not serialize merge request' do
+ expect(API::Entities::MergeRequestBasic).not_to receive(:represent)
+
+ get api(endpoint_path)
+ end
+ end
+
describe 'route shadowing' do
include GrapePathHelpers::NamedRouteMatcher
@@ -979,13 +995,43 @@ RSpec.describe API::MergeRequests do
end
end
- describe "GET /projects/:id/merge_requests" do
+ describe "GET /projects/:id/merge_requests", :use_clean_rails_memory_store_caching do
include_context 'with merge requests'
let(:endpoint_path) { "/projects/#{project.id}/merge_requests" }
it_behaves_like 'merge requests list'
+ context 'caching' do
+ let(:params) { {} }
+
+ before do
+ get api(endpoint_path)
+ end
+
+ context 'when it is cached' do
+ it_behaves_like 'a cached MergeRequest api request'
+ end
+
+ context 'when it is not cached' do
+ context 'when the status changes' do
+ before do
+ merge_request.mark_as_unchecked!
+ end
+
+ it_behaves_like 'a non-cached MergeRequest api request', 1
+ end
+
+ context 'when another user requests' do
+ before do
+ sign_in(user2)
+ end
+
+ it_behaves_like 'a non-cached MergeRequest api request', 4
+ end
+ end
+ end
+
it "returns 404 for non public projects" do
project = create(:project, :private)
@@ -1466,6 +1512,45 @@ RSpec.describe API::MergeRequests do
end
end
+ describe 'GET /projects/:id/merge_requests/:merge_request_iid/reviewers' do
+ it 'returns reviewers' do
+ reviewer = create(:user)
+ merge_request.merge_request_reviewers.create!(reviewer: reviewer)
+
+ get api("/projects/#{project.id}/merge_requests/#{merge_request.iid}/reviewers", user)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to include_pagination_headers
+ expect(json_response).to be_an Array
+ expect(json_response.size).to eq(merge_request.merge_request_reviewers.size)
+
+ expect(json_response.last['user']['id']).to eq(reviewer.id)
+ expect(json_response.last['user']['name']).to eq(reviewer.name)
+ expect(json_response.last['user']['username']).to eq(reviewer.username)
+ expect(json_response.last['state']).to eq('unreviewed')
+ expect(json_response.last['updated_state_by']).to be_nil
+ expect(json_response.last['created_at']).to be_present
+ end
+
+ it 'returns a 404 when iid does not exist' do
+ get api("/projects/#{project.id}/merge_requests/#{non_existing_record_iid}/reviewers", user)
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+
+ it 'returns a 404 when id is used instead of iid' do
+ get api("/projects/#{project.id}/merge_requests/#{merge_request.id}/reviewers", user)
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+
+ context 'when merge request author has only guest access' do
+ it_behaves_like 'rejects user from accessing merge request info' do
+ let(:url) { "/projects/#{project.id}/merge_requests/#{merge_request.iid}/reviewers" }
+ end
+ end
+ end
+
describe 'GET /projects/:id/merge_requests/:merge_request_iid/commits' do
include_context 'with merge requests'
@@ -2482,39 +2567,37 @@ RSpec.describe API::MergeRequests do
let(:pipeline) { create(:ci_pipeline, project: project) }
it "returns merge_request in case of success" do
- put api("/projects/#{project.id}/merge_requests/#{merge_request.iid}/merge", user)
+ expect { put api("/projects/#{project.id}/merge_requests/#{merge_request.iid}/merge", user) }
+ .to change { merge_request.reload.merged? }
+ .from(false)
+ .to(true)
expect(response).to have_gitlab_http_status(:ok)
end
- context 'when change_response_code_merge_status is enabled' do
- it "returns 422 if branch can't be merged" do
- allow_next_found_instance_of(MergeRequest) do |merge_request|
- allow(merge_request).to receive(:can_be_merged?).and_return(false)
+ context 'when the merge request fails to merge' do
+ it 'returns 422' do
+ expect_next_instance_of(::MergeRequests::MergeService) do |service|
+ expect(service).to receive(:execute)
end
- put api("/projects/#{project.id}/merge_requests/#{merge_request.iid}/merge", user)
+ expect { put api("/projects/#{project.id}/merge_requests/#{merge_request.iid}/merge", user) }
+ .not_to change { merge_request.reload.merged? }
expect(response).to have_gitlab_http_status(:unprocessable_entity)
- expect(json_response['message']).to eq('Branch cannot be merged')
+ expect(json_response['message']).to eq('Branch cannot be merged')
end
end
- context 'when change_response_code_merge_status is disabled' do
- before do
- stub_feature_flags(change_response_code_merge_status: false)
+ it "returns 422 if branch can't be merged" do
+ allow_next_found_instance_of(MergeRequest) do |merge_request|
+ allow(merge_request).to receive(:can_be_merged?).and_return(false)
end
- it "returns 406 if branch can't be merged" do
- allow_next_found_instance_of(MergeRequest) do |merge_request|
- allow(merge_request).to receive(:can_be_merged?).and_return(false)
- end
-
- put api("/projects/#{project.id}/merge_requests/#{merge_request.iid}/merge", user)
+ put api("/projects/#{project.id}/merge_requests/#{merge_request.iid}/merge", user)
- expect(response).to have_gitlab_http_status(:not_acceptable)
- expect(json_response['message']).to eq('Branch cannot be merged')
- end
+ expect(response).to have_gitlab_http_status(:unprocessable_entity)
+ expect(json_response['message']).to eq('Branch cannot be merged')
end
it "returns 405 if merge_request is not open" do
diff --git a/spec/requests/api/metrics/dashboard/annotations_spec.rb b/spec/requests/api/metrics/dashboard/annotations_spec.rb
index 79a38702354..5e64ac7d481 100644
--- a/spec/requests/api/metrics/dashboard/annotations_spec.rb
+++ b/spec/requests/api/metrics/dashboard/annotations_spec.rb
@@ -10,7 +10,7 @@ RSpec.describe API::Metrics::Dashboard::Annotations do
let(:dashboard) { 'config/prometheus/common_metrics.yml' }
let(:starting_at) { Time.now.iso8601 }
let(:ending_at) { 1.hour.from_now.iso8601 }
- let(:params) { attributes_for(:metrics_dashboard_annotation, environment: environment, starting_at: starting_at, ending_at: ending_at, dashboard_path: dashboard)}
+ let(:params) { attributes_for(:metrics_dashboard_annotation, environment: environment, starting_at: starting_at, ending_at: ending_at, dashboard_path: dashboard) }
shared_examples 'POST /:source_type/:id/metrics_dashboard/annotations' do |source_type|
let(:url) { "/#{source_type.pluralize}/#{source.id}/metrics_dashboard/annotations" }
diff --git a/spec/requests/api/notes_spec.rb b/spec/requests/api/notes_spec.rb
index f6a65274ca2..89abb28140a 100644
--- a/spec/requests/api/notes_spec.rb
+++ b/spec/requests/api/notes_spec.rb
@@ -111,7 +111,7 @@ RSpec.describe API::Notes do
system: false
end
- let(:test_url) {"/projects/#{ext_proj.id}/issues/#{ext_issue.iid}/notes"}
+ let(:test_url) { "/projects/#{ext_proj.id}/issues/#{ext_issue.iid}/notes" }
shared_examples 'a notes request' do
it 'is a note array response' do
diff --git a/spec/requests/api/npm_project_packages_spec.rb b/spec/requests/api/npm_project_packages_spec.rb
index 62809b432af..3bcffac2760 100644
--- a/spec/requests/api/npm_project_packages_spec.rb
+++ b/spec/requests/api/npm_project_packages_spec.rb
@@ -59,7 +59,7 @@ RSpec.describe API::NpmProjectPackages do
end
context 'with access token' do
- let(:headers) { build_token_auth_header(token.token) }
+ let(:headers) { build_token_auth_header(token.plaintext_token) }
it_behaves_like 'successfully downloads the file'
it_behaves_like 'a package tracking event', 'API::NpmPackages', 'pull_package'
@@ -95,7 +95,7 @@ RSpec.describe API::NpmProjectPackages do
it_behaves_like 'a package file that requires auth'
context 'with guest' do
- let(:headers) { build_token_auth_header(token.token) }
+ let(:headers) { build_token_auth_header(token.plaintext_token) }
it 'denies download when not enough permissions' do
project.add_guest(user)
@@ -307,8 +307,8 @@ RSpec.describe API::NpmProjectPackages do
expect { upload_package_with_token }
.to change { project.packages.count }.by(1)
.and change { Packages::PackageFile.count }.by(1)
- .and change { Packages::Dependency.count}.by(4)
- .and change { Packages::DependencyLink.count}.by(6)
+ .and change { Packages::Dependency.count }.by(4)
+ .and change { Packages::DependencyLink.count }.by(6)
expect(response).to have_gitlab_http_status(:ok)
end
@@ -323,8 +323,8 @@ RSpec.describe API::NpmProjectPackages do
expect { upload_package_with_token }
.to change { project.packages.count }.by(1)
.and change { Packages::PackageFile.count }.by(1)
- .and not_change { Packages::Dependency.count}
- .and change { Packages::DependencyLink.count}.by(6)
+ .and not_change { Packages::Dependency.count }
+ .and change { Packages::DependencyLink.count }.by(6)
end
end
end
@@ -356,7 +356,7 @@ RSpec.describe API::NpmProjectPackages do
end
def upload_with_token(package_name, params = {})
- upload_package(package_name, params.merge(access_token: token.token))
+ upload_package(package_name, params.merge(access_token: token.plaintext_token))
end
def upload_with_job_token(package_name, params = {})
diff --git a/spec/requests/api/nuget_group_packages_spec.rb b/spec/requests/api/nuget_group_packages_spec.rb
index 1b71f0f9de1..c1375288809 100644
--- a/spec/requests/api/nuget_group_packages_spec.rb
+++ b/spec/requests/api/nuget_group_packages_spec.rb
@@ -73,7 +73,7 @@ RSpec.describe API::NugetGroupPackages do
let(:include_prereleases) { true }
let(:query_parameters) { { q: search_term, take: take, skip: skip, prerelease: include_prereleases }.compact }
- subject { get api(url), headers: {}}
+ subject { get api(url), headers: {} }
shared_examples 'handling mixed visibilities' do
where(:group_visibility, :subgroup_visibility, :expected_status) do
diff --git a/spec/requests/api/pages/pages_spec.rb b/spec/requests/api/pages/pages_spec.rb
index 0eb2ae64f43..7d44ff533aa 100644
--- a/spec/requests/api/pages/pages_spec.rb
+++ b/spec/requests/api/pages/pages_spec.rb
@@ -19,7 +19,7 @@ RSpec.describe API::Pages do
end
it_behaves_like '404 response' do
- let(:request) { delete api("/projects/#{project.id}/pages", admin)}
+ let(:request) { delete api("/projects/#{project.id}/pages", admin) }
end
end
diff --git a/spec/requests/api/pages_domains_spec.rb b/spec/requests/api/pages_domains_spec.rb
index 75183156c9d..cd4e8b30d8f 100644
--- a/spec/requests/api/pages_domains_spec.rb
+++ b/spec/requests/api/pages_domains_spec.rb
@@ -19,8 +19,8 @@ RSpec.describe API::PagesDomains do
end
let(:pages_domain_secure_params) { build(:pages_domain, domain: 'ssl.other-domain.test', project: project).slice(:domain, :certificate, :key) }
- let(:pages_domain_secure_key_missmatch_params) {build(:pages_domain, :with_trusted_chain, project: project).slice(:domain, :certificate, :key) }
- let(:pages_domain_secure_missing_chain_params) {build(:pages_domain, :with_missing_chain, project: project).slice(:certificate) }
+ let(:pages_domain_secure_key_missmatch_params) { build(:pages_domain, :with_trusted_chain, project: project).slice(:domain, :certificate, :key) }
+ let(:pages_domain_secure_missing_chain_params) { build(:pages_domain, :with_missing_chain, project: project).slice(:certificate) }
let(:route) { "/projects/#{project.id}/pages/domains" }
let(:route_domain) { "/projects/#{project.id}/pages/domains/#{pages_domain.domain}" }
diff --git a/spec/requests/api/personal_access_tokens_spec.rb b/spec/requests/api/personal_access_tokens_spec.rb
index 403c646ee32..8d8998cfdd6 100644
--- a/spec/requests/api/personal_access_tokens_spec.rb
+++ b/spec/requests/api/personal_access_tokens_spec.rb
@@ -34,7 +34,7 @@ RSpec.describe API::PersonalAccessTokens do
context 'logged in as a non-Administrator' do
let_it_be(:current_user) { create(:user) }
let_it_be(:user) { create(:user) }
- let_it_be(:token) { create(:personal_access_token, user: current_user)}
+ let_it_be(:token) { create(:personal_access_token, user: current_user) }
let_it_be(:other_token) { create(:personal_access_token, user: user) }
let_it_be(:token_impersonated) { create(:personal_access_token, impersonation: true, user: current_user) }
@@ -100,7 +100,7 @@ RSpec.describe API::PersonalAccessTokens do
it 'fails to return PAT because no PAT exists with this id' do
get api(invalid_path, admin_user)
- expect(response).to have_gitlab_http_status(:unauthorized)
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
diff --git a/spec/requests/api/project_attributes.yml b/spec/requests/api/project_attributes.yml
index 8d3622ca17d..670035187cb 100644
--- a/spec/requests/api/project_attributes.yml
+++ b/spec/requests/api/project_attributes.yml
@@ -101,6 +101,7 @@ ci_cd_settings:
job_token_scope_enabled: ci_job_token_scope_enabled
separated_caches: ci_separated_caches
opt_in_jwt: ci_opt_in_jwt
+ allow_fork_pipelines_to_run_in_parent_project: ci_allow_fork_pipelines_to_run_in_parent_project
build_import_state: # import_state
unexposed_attributes:
@@ -157,6 +158,7 @@ project_setting:
- cve_id_request_enabled
- mr_default_target_self
- target_platforms
+ - selective_code_owner_removals
build_service_desk_setting: # service_desk_setting
unexposed_attributes:
diff --git a/spec/requests/api/project_import_spec.rb b/spec/requests/api/project_import_spec.rb
index 8655e5b0238..afe5a7d4a21 100644
--- a/spec/requests/api/project_import_spec.rb
+++ b/spec/requests/api/project_import_spec.rb
@@ -47,7 +47,7 @@ RSpec.describe API::ProjectImport, :aggregate_failures do
it 'executes a limited number of queries' do
control_count = ActiveRecord::QueryRecorder.new { subject }.count
- expect(control_count).to be <= 108
+ expect(control_count).to be <= 109
end
it 'schedules an import using a namespace' do
diff --git a/spec/requests/api/project_packages_spec.rb b/spec/requests/api/project_packages_spec.rb
index 5f4b8899a33..7a05da8e13f 100644
--- a/spec/requests/api/project_packages_spec.rb
+++ b/spec/requests/api/project_packages_spec.rb
@@ -86,6 +86,18 @@ RSpec.describe API::ProjectPackages do
expect(json_response).to include(a_hash_including('_links' => a_hash_including('web_path' => include(nested_project.namespace.full_path))))
end
end
+
+ context 'with JOB-TOKEN auth' do
+ let(:job) { create(:ci_build, :running, user: user) }
+
+ subject { get api(url, job_token: job.token) }
+
+ it_behaves_like 'returns packages', :project, :maintainer
+ it_behaves_like 'returns packages', :project, :developer
+ it_behaves_like 'returns packages', :project, :reporter
+ it_behaves_like 'returns packages', :project, :no_type
+ it_behaves_like 'returns packages', :project, :guest
+ end
end
context 'project is private' do
@@ -116,6 +128,19 @@ RSpec.describe API::ProjectPackages do
end
end
end
+
+ context 'with JOB-TOKEN auth' do
+ let(:job) { create(:ci_build, :running, user: user) }
+
+ subject { get api(url, job_token: job.token) }
+
+ it_behaves_like 'returns packages', :project, :maintainer
+ it_behaves_like 'returns packages', :project, :developer
+ it_behaves_like 'returns packages', :project, :reporter
+ it_behaves_like 'rejects packages access', :project, :no_type, :not_found
+ # TODO uncomment when https://gitlab.com/gitlab-org/gitlab/-/issues/370998 is resolved
+ # it_behaves_like 'rejects packages access', :project, :guest, :not_found
+ end
end
context 'with pagination params' do
@@ -177,6 +202,8 @@ RSpec.describe API::ProjectPackages do
end
describe 'GET /projects/:id/packages/:package_id' do
+ let(:single_package_schema) { 'public_api/v4/packages/package' }
+
subject { get api(package_url, user) }
shared_examples 'no destroy url' do
@@ -217,7 +244,7 @@ RSpec.describe API::ProjectPackages do
subject
expect(response).to have_gitlab_http_status(:ok)
- expect(response).to match_response_schema('public_api/v4/packages/package')
+ expect(response).to match_response_schema(single_package_schema)
end
it 'returns 404 when the package does not exist' do
@@ -233,6 +260,18 @@ RSpec.describe API::ProjectPackages do
end
it_behaves_like 'no destroy url'
+
+ context 'with JOB-TOKEN auth' do
+ let(:job) { create(:ci_build, :running, user: user) }
+
+ subject { get api(package_url, job_token: job.token) }
+
+ it_behaves_like 'returns package', :project, :maintainer
+ it_behaves_like 'returns package', :project, :developer
+ it_behaves_like 'returns package', :project, :reporter
+ it_behaves_like 'returns package', :project, :no_type
+ it_behaves_like 'returns package', :project, :guest
+ end
end
context 'project is private' do
@@ -259,7 +298,7 @@ RSpec.describe API::ProjectPackages do
subject
expect(response).to have_gitlab_http_status(:ok)
- expect(response).to match_response_schema('public_api/v4/packages/package')
+ expect(response).to match_response_schema(single_package_schema)
end
it_behaves_like 'no destroy url'
@@ -273,6 +312,19 @@ RSpec.describe API::ProjectPackages do
it_behaves_like 'destroy url'
end
+ context 'with JOB-TOKEN auth' do
+ let(:job) { create(:ci_build, :running, user: user) }
+
+ subject { get api(package_url, job_token: job.token) }
+
+ it_behaves_like 'returns package', :project, :maintainer
+ it_behaves_like 'returns package', :project, :developer
+ it_behaves_like 'returns package', :project, :reporter
+ # TODO uncomment when https://gitlab.com/gitlab-org/gitlab/-/issues/370998 is resolved
+ # it_behaves_like 'rejects packages access', :project, :guest, :not_found
+ it_behaves_like 'rejects packages access', :project, :no_type, :not_found
+ end
+
context 'with pipeline' do
let!(:package1) { create(:npm_package, :with_build, project: project) }
@@ -355,6 +407,26 @@ RSpec.describe API::ProjectPackages do
expect(response).to have_gitlab_http_status(:no_content)
end
+
+ context 'with JOB-TOKEN auth' do
+ let(:job) { create(:ci_build, :running, user: user) }
+
+ it 'returns 403 for a user without enough permissions' do
+ project.add_developer(user)
+
+ expect { delete api(package_url, job_token: job.token) }.not_to change { ::Packages::Package.pending_destruction.count }
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+
+ it 'returns 204' do
+ project.add_maintainer(user)
+
+ expect { delete api(package_url, job_token: job.token) }.to change { ::Packages::Package.pending_destruction.count }.by(1)
+
+ expect(response).to have_gitlab_http_status(:no_content)
+ end
+ end
end
context 'with a maven package' do
diff --git a/spec/requests/api/project_templates_spec.rb b/spec/requests/api/project_templates_spec.rb
index 070fd6db3dc..87d70a87f42 100644
--- a/spec/requests/api/project_templates_spec.rb
+++ b/spec/requests/api/project_templates_spec.rb
@@ -77,7 +77,7 @@ RSpec.describe API::ProjectTemplates do
expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(response).to match_response_schema('public_api/v4/template_list')
- expect(json_response.map {|t| t['key']}).to match_array(%w(bug feature_proposal template_test))
+ expect(json_response.map { |t| t['key'] }).to match_array(%w(bug feature_proposal template_test))
end
it 'returns merge request templates' do
@@ -86,7 +86,7 @@ RSpec.describe API::ProjectTemplates do
expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(response).to match_response_schema('public_api/v4/template_list')
- expect(json_response.map {|t| t['key']}).to match_array(%w(bug feature_proposal template_test))
+ expect(json_response.map { |t| t['key'] }).to match_array(%w(bug feature_proposal template_test))
end
it 'returns 400 for an unknown template type' do
diff --git a/spec/requests/api/projects_spec.rb b/spec/requests/api/projects_spec.rb
index ae689d7327b..94688833d88 100644
--- a/spec/requests/api/projects_spec.rb
+++ b/spec/requests/api/projects_spec.rb
@@ -1093,7 +1093,7 @@ RSpec.describe API::Projects do
it 'does not create new project and respond with 403' do
allow_any_instance_of(User).to receive(:projects_limit_left).and_return(0)
expect { post api('/projects', user2), params: { name: 'foo' } }
- .to change {Project.count}.by(0)
+ .to change { Project.count }.by(0)
expect(response).to have_gitlab_http_status(:forbidden)
end
end
@@ -2427,6 +2427,7 @@ RSpec.describe API::Projects do
expect(json_response['only_allow_merge_if_all_discussions_are_resolved']).to eq(project.only_allow_merge_if_all_discussions_are_resolved)
expect(json_response['ci_default_git_depth']).to eq(project.ci_default_git_depth)
expect(json_response['ci_forward_deployment_enabled']).to eq(project.ci_forward_deployment_enabled)
+ expect(json_response['ci_allow_fork_pipelines_to_run_in_parent_project']).to eq(project.ci_allow_fork_pipelines_to_run_in_parent_project)
expect(json_response['ci_separated_caches']).to eq(project.ci_separated_caches)
expect(json_response['merge_method']).to eq(project.merge_method.to_s)
expect(json_response['squash_option']).to eq(project.squash_option.to_s)
@@ -3692,6 +3693,7 @@ RSpec.describe API::Projects do
merge_method: 'ff',
ci_default_git_depth: 20,
ci_forward_deployment_enabled: false,
+ ci_allow_fork_pipelines_to_run_in_parent_project: false,
ci_separated_caches: false,
description: 'new description' }
diff --git a/spec/requests/api/protected_branches_spec.rb b/spec/requests/api/protected_branches_spec.rb
index 8efb822cb83..9f10eb1bb9f 100644
--- a/spec/requests/api/protected_branches_spec.rb
+++ b/spec/requests/api/protected_branches_spec.rb
@@ -3,14 +3,22 @@
require 'spec_helper'
RSpec.describe API::ProtectedBranches do
- let(:user) { create(:user) }
- let!(:project) { create(:project, :repository) }
+ let_it_be_with_reload(:project) { create(:project, :repository) }
+ let_it_be(:maintainer) { create(:user) }
+ let_it_be(:guest) { create(:user) }
+
let(:protected_name) { 'feature' }
let(:branch_name) { protected_name }
+
let!(:protected_branch) do
create(:protected_branch, project: project, name: protected_name)
end
+ before_all do
+ project.add_maintainer(maintainer)
+ project.add_guest(guest)
+ end
+
describe "GET /projects/:id/protected_branches" do
let(:params) { {} }
let(:route) { "/projects/#{project.id}/protected_branches" }
@@ -29,9 +37,7 @@ RSpec.describe API::ProtectedBranches do
end
context 'when authenticated as a maintainer' do
- before do
- project.add_maintainer(user)
- end
+ let(:user) { maintainer }
context 'when search param is not present' do
it_behaves_like 'protected branches' do
@@ -49,9 +55,7 @@ RSpec.describe API::ProtectedBranches do
end
context 'when authenticated as a guest' do
- before do
- project.add_guest(user)
- end
+ let(:user) { guest }
it_behaves_like '403 response' do
let(:request) { get api(route, user) }
@@ -84,9 +88,7 @@ RSpec.describe API::ProtectedBranches do
end
context 'when authenticated as a maintainer' do
- before do
- project.add_maintainer(user)
- end
+ let(:user) { maintainer }
it_behaves_like 'protected branch'
@@ -104,9 +106,7 @@ RSpec.describe API::ProtectedBranches do
end
context 'when authenticated as a guest' do
- before do
- project.add_guest(user)
- end
+ let(:user) { guest }
it_behaves_like '403 response' do
let(:request) { get api(route, user) }
@@ -124,9 +124,7 @@ RSpec.describe API::ProtectedBranches do
end
context 'when authenticated as a maintainer' do
- before do
- project.add_maintainer(user)
- end
+ let(:user) { maintainer }
it 'protects a single branch' do
post post_endpoint, params: { name: branch_name }
@@ -226,13 +224,10 @@ RSpec.describe API::ProtectedBranches do
end
end
- context 'when a policy restricts rule deletion' do
- before do
- policy = instance_double(ProtectedBranchPolicy, allowed?: false)
- expect(ProtectedBranchPolicy).to receive(:new).and_return(policy)
- end
+ context 'when a policy restricts rule creation' do
+ it "prevents creations of the protected branch rule" do
+ disallow(:create_protected_branch, an_instance_of(ProtectedBranch))
- it "prevents deletion of the protected branch rule" do
post post_endpoint, params: { name: branch_name }
expect(response).to have_gitlab_http_status(:forbidden)
@@ -241,9 +236,7 @@ RSpec.describe API::ProtectedBranches do
end
context 'when authenticated as a guest' do
- before do
- project.add_guest(user)
- end
+ let(:user) { guest }
it "returns a 403 error if guest" do
post post_endpoint, params: { name: branch_name }
@@ -254,12 +247,9 @@ RSpec.describe API::ProtectedBranches do
end
describe "DELETE /projects/:id/protected_branches/unprotect/:branch" do
+ let(:user) { maintainer }
let(:delete_endpoint) { api("/projects/#{project.id}/protected_branches/#{branch_name}", user) }
- before do
- project.add_maintainer(user)
- end
-
it "unprotects a single branch" do
delete delete_endpoint
@@ -277,12 +267,9 @@ RSpec.describe API::ProtectedBranches do
end
context 'when a policy restricts rule deletion' do
- before do
- policy = instance_double(ProtectedBranchPolicy, allowed?: false)
- expect(ProtectedBranchPolicy).to receive(:new).and_return(policy)
- end
-
it "prevents deletion of the protected branch rule" do
+ disallow(:destroy_protected_branch, protected_branch)
+
delete delete_endpoint
expect(response).to have_gitlab_http_status(:forbidden)
@@ -299,4 +286,9 @@ RSpec.describe API::ProtectedBranches do
end
end
end
+
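+ # Stubs Ability.allowed? to deny only the given ability on the protected branch,
+ # leaving every other permission check with its real behaviour.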
+ def disallow(ability, protected_branch)
+ allow(Ability).to receive(:allowed?).and_call_original
+ allow(Ability).to receive(:allowed?).with(user, ability, protected_branch).and_return(false)
+ end
end
diff --git a/spec/requests/api/pypi_packages_spec.rb b/spec/requests/api/pypi_packages_spec.rb
index 9e0d3780fd8..6c130bb4963 100644
--- a/spec/requests/api/pypi_packages_spec.rb
+++ b/spec/requests/api/pypi_packages_spec.rb
@@ -41,7 +41,7 @@ RSpec.describe API::PypiPackages do
it_behaves_like 'deploy token for package GET requests'
context 'with group path as id' do
- let(:url) { "/groups/#{CGI.escape(group.full_path)}/-/packages/pypi/simple"}
+ let(:url) { "/groups/#{CGI.escape(group.full_path)}/-/packages/pypi/simple" }
it_behaves_like 'deploy token for package GET requests'
end
@@ -102,7 +102,7 @@ RSpec.describe API::PypiPackages do
it_behaves_like 'deploy token for package GET requests'
context 'with group path as id' do
- let(:url) { "/groups/#{CGI.escape(group.full_path)}/-/packages/pypi/simple/#{package_name}"}
+ let(:url) { "/groups/#{CGI.escape(group.full_path)}/-/packages/pypi/simple/#{package_name}" }
it_behaves_like 'deploy token for package GET requests'
end
diff --git a/spec/requests/api/release/links_spec.rb b/spec/requests/api/release/links_spec.rb
index 2345c0063dd..57b2e005929 100644
--- a/spec/requests/api/release/links_spec.rb
+++ b/spec/requests/api/release/links_spec.rb
@@ -66,7 +66,7 @@ RSpec.describe API::Release::Links do
end
context 'when release does not exist' do
- let!(:release) { }
+ let!(:release) {}
it_behaves_like '404 response' do
let(:request) { get api("/projects/#{project.id}/releases/v0.1/assets/links", maintainer) }
@@ -98,7 +98,7 @@ RSpec.describe API::Release::Links do
end
context 'when the release does not exists' do
- let!(:release) { }
+ let!(:release) {}
it_behaves_like '403 response' do
let(:request) { get api("/projects/#{project.id}/releases/v0.1/assets/links", non_project_member) }
@@ -409,7 +409,7 @@ RSpec.describe API::Release::Links do
end
context 'when there are no corresponding release link' do
- let!(:release_link) { }
+ let!(:release_link) {}
it_behaves_like '404 response' do
let(:request) do
@@ -510,7 +510,7 @@ RSpec.describe API::Release::Links do
end
context 'when there are no corresponding release link' do
- let!(:release_link) { }
+ let!(:release_link) {}
it_behaves_like '404 response' do
let(:request) do
diff --git a/spec/requests/api/releases_spec.rb b/spec/requests/api/releases_spec.rb
index c050214ff50..1d9e3a6c887 100644
--- a/spec/requests/api/releases_spec.rb
+++ b/spec/requests/api/releases_spec.rb
@@ -962,7 +962,7 @@ RSpec.describe API::Releases do
context 'with milestones' do
let(:subject) { post api("/projects/#{project.id}/releases", maintainer), params: params }
let(:milestone) { create(:milestone, project: project, title: 'v1.0') }
- let(:returned_milestones) { json_response['milestones'].map {|m| m['title']} }
+ let(:returned_milestones) { json_response['milestones'].map { |m| m['title'] } }
before do
params.merge!(milestone_params)
@@ -1120,7 +1120,7 @@ RSpec.describe API::Releases do
end
context 'when there are no corresponding releases' do
- let!(:release) { }
+ let!(:release) {}
it 'forbids the request' do
put api("/projects/#{project.id}/releases/v0.1", maintainer), params: params
@@ -1158,7 +1158,7 @@ RSpec.describe API::Releases do
end
context 'with milestones' do
- let(:returned_milestones) { json_response['milestones'].map {|m| m['title']} }
+ let(:returned_milestones) { json_response['milestones'].map { |m| m['title'] } }
subject { put api("/projects/#{project.id}/releases/v0.1", maintainer), params: params }
@@ -1310,7 +1310,7 @@ RSpec.describe API::Releases do
end
context 'when there are no corresponding releases' do
- let!(:release) { }
+ let!(:release) {}
it 'forbids the request' do
delete api("/projects/#{project.id}/releases/v0.1", maintainer)
diff --git a/spec/requests/api/repositories_spec.rb b/spec/requests/api/repositories_spec.rb
index cf0165d123f..3c22f918af5 100644
--- a/spec/requests/api/repositories_spec.rb
+++ b/spec/requests/api/repositories_spec.rb
@@ -92,6 +92,32 @@ RSpec.describe API::Repositories do
expect(json_response.map { |t| t["id"] }).not_to include(page_token)
end
end
+
+ context 'with pagination=none' do
+ context 'with recursive=1' do
+ it 'returns unpaginated recursive project paths tree' do
+ get api("#{route}?recursive=1&pagination=none", current_user)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response).to be_an Array
+ expect(response).not_to include_pagination_headers
+ expect(json_response[4]['name']).to eq('html')
+ expect(json_response[4]['path']).to eq('files/html')
+ expect(json_response[4]['type']).to eq('tree')
+ expect(json_response[4]['mode']).to eq('040000')
+ end
+ end
+
+ context 'with recursive=0' do
+ it 'returns 400' do
+ get api("#{route}?recursive=0&pagination=none", current_user)
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(json_response['error'])
+ .to eq('pagination cannot be "none" unless "recursive" is true')
+ end
+ end
+ end
end
context 'when unauthenticated', 'and project is public' do
diff --git a/spec/requests/api/search_spec.rb b/spec/requests/api/search_spec.rb
index 4d2a69cd85b..66b78829e0d 100644
--- a/spec/requests/api/search_spec.rb
+++ b/spec/requests/api/search_spec.rb
@@ -350,6 +350,17 @@ RSpec.describe API::Search do
include_examples 'pagination', scope: :snippet_titles
end
end
+
+ it 'sets global search information for logging' do
+ expect(Gitlab::Instrumentation::GlobalSearchApi).to receive(:set_information).with(
+ type: 'basic',
+ level: 'global',
+ scope: 'issues',
+ search_duration_s: a_kind_of(Numeric)
+ )
+
+ get api(endpoint, user), params: { scope: 'issues', search: 'john doe' }
+ end
end
it_behaves_like 'rate limited endpoint', rate_limit_key: :search_rate_limit do
diff --git a/spec/requests/api/settings_spec.rb b/spec/requests/api/settings_spec.rb
index d4a8e591622..6f0d5827a80 100644
--- a/spec/requests/api/settings_spec.rb
+++ b/spec/requests/api/settings_spec.rb
@@ -46,7 +46,7 @@ RSpec.describe API::Settings, 'Settings', :do_not_mock_admin_mode_setting do
expect(json_response['spam_check_api_key']).to be_nil
expect(json_response['wiki_page_max_content_bytes']).to be_a(Integer)
expect(json_response['require_admin_approval_after_user_signup']).to eq(true)
- expect(json_response['personal_access_token_prefix']).to be_nil
+ expect(json_response['personal_access_token_prefix']).to eq('glpat-')
expect(json_response['admin_mode']).to be(false)
expect(json_response['whats_new_variant']).to eq('all_tiers')
expect(json_response['user_deactivation_emails_enabled']).to be(true)
diff --git a/spec/requests/api/snippets_spec.rb b/spec/requests/api/snippets_spec.rb
index 0ba1011684a..0dd6e484e8d 100644
--- a/spec/requests/api/snippets_spec.rb
+++ b/spec/requests/api/snippets_spec.rb
@@ -28,7 +28,7 @@ RSpec.describe API::Snippets, factory_default: :keep do
expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
- expect(json_response.map { |snippet| snippet['id']} ).to contain_exactly(
+ expect(json_response.map { |snippet| snippet['id'] } ).to contain_exactly(
public_snippet.id,
internal_snippet.id,
private_snippet.id)
@@ -75,7 +75,7 @@ RSpec.describe API::Snippets, factory_default: :keep do
it 'returns snippets available for user in given time range' do
get api(path, personal_access_token: user_token)
- expect(json_response.map { |snippet| snippet['id']} ).to contain_exactly(
+ expect(json_response.map { |snippet| snippet['id'] } ).to contain_exactly(
private_snippet_in_time_range1.id,
private_snippet_in_time_range2.id)
end
@@ -99,10 +99,10 @@ RSpec.describe API::Snippets, factory_default: :keep do
expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
- expect(json_response.map { |snippet| snippet['id']} ).to contain_exactly(
+ expect(json_response.map { |snippet| snippet['id'] } ).to contain_exactly(
public_snippet.id,
public_snippet_other.id)
- expect(json_response.map { |snippet| snippet['web_url']} ).to contain_exactly(
+ expect(json_response.map { |snippet| snippet['web_url'] } ).to contain_exactly(
"http://localhost/-/snippets/#{public_snippet.id}",
"http://localhost/-/snippets/#{public_snippet_other.id}")
expect(json_response[0]['files'].first).to eq snippet_blob_file(public_snippet_other.blobs.first)
@@ -126,7 +126,7 @@ RSpec.describe API::Snippets, factory_default: :keep do
it 'returns public snippets available to user in given time range' do
get api(path, personal_access_token: user_token)
- expect(json_response.map { |snippet| snippet['id']} ).to contain_exactly(
+ expect(json_response.map { |snippet| snippet['id'] } ).to contain_exactly(
public_snippet_in_time_range.id)
end
end
diff --git a/spec/requests/api/topics_spec.rb b/spec/requests/api/topics_spec.rb
index e711414a895..72221e3fb6a 100644
--- a/spec/requests/api/topics_spec.rb
+++ b/spec/requests/api/topics_spec.rb
@@ -36,6 +36,22 @@ RSpec.describe API::Topics do
expect(json_response[2]['total_projects_count']).to eq(1)
end
+ context 'with without_projects' do
+ let_it_be(:topic_4) { create(:topic, name: 'unassigned topic', total_projects_count: 0) }
+
+ it 'returns topics without assigned projects' do
+ get api('/topics'), params: { without_projects: true }
+
+ expect(json_response.map { |t| t['id'] }).to contain_exactly(topic_4.id)
+ end
+
+ it 'returns all topics' do
+ get api('/topics'), params: { without_projects: false }
+
+ expect(json_response.map { |t| t['id'] }).to contain_exactly(topic_1.id, topic_2.id, topic_3.id, topic_4.id)
+ end
+ end
+
context 'with search' do
using RSpec::Parameterized::TableSyntax
diff --git a/spec/requests/api/unleash_spec.rb b/spec/requests/api/unleash_spec.rb
index 7bdb89fb286..3ee895d9421 100644
--- a/spec/requests/api/unleash_spec.rb
+++ b/spec/requests/api/unleash_spec.rb
@@ -8,8 +8,8 @@ RSpec.describe API::Unleash do
let_it_be(:project, refind: true) { create(:project) }
let(:project_id) { project.id }
- let(:params) { }
- let(:headers) { }
+ let(:params) {}
+ let(:headers) {}
shared_examples 'authenticated request' do
context 'when using instance id' do
@@ -57,7 +57,7 @@ RSpec.describe API::Unleash do
context 'when using header' do
let(:client) { create(:operations_feature_flags_client, project: project) }
- let(:headers) { { "UNLEASH-INSTANCEID" => client.token }}
+ let(:headers) { { "UNLEASH-INSTANCEID" => client.token } }
it 'responds with OK' do
subject
diff --git a/spec/requests/api/user_counts_spec.rb b/spec/requests/api/user_counts_spec.rb
index 2d4705920cf..ab2aa87d1b7 100644
--- a/spec/requests/api/user_counts_spec.rb
+++ b/spec/requests/api/user_counts_spec.rb
@@ -43,21 +43,6 @@ RSpec.describe API::UserCounts do
expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to be_a Hash
expect(json_response['merge_requests']).to eq(2)
- expect(json_response['attention_requests']).to eq(0)
- end
-
- describe 'mr_attention_requests is disabled' do
- before do
- stub_feature_flags(mr_attention_requests: false)
- end
-
- it 'does not include attention_requests count' do
- create(:merge_request, source_project: project, author: user, assignees: [user])
-
- get api('/user_counts', user)
-
- expect(json_response.key?('attention_requests')).to be(false)
- end
end
end
diff --git a/spec/requests/api/users_spec.rb b/spec/requests/api/users_spec.rb
index 81ca2548995..26238a87209 100644
--- a/spec/requests/api/users_spec.rb
+++ b/spec/requests/api/users_spec.rb
@@ -1184,7 +1184,7 @@ RSpec.describe API::Users do
post api('/users', admin),
params: {
email: 'invalid email',
- password: 'password',
+ password: User.random_password,
name: 'test'
}
expect(response).to have_gitlab_http_status(:bad_request)
@@ -1250,7 +1250,7 @@ RSpec.describe API::Users do
post api('/users', admin),
params: {
email: 'test@example.com',
- password: 'password',
+ password: User.random_password,
username: 'test',
name: 'foo'
}
@@ -1262,7 +1262,7 @@ RSpec.describe API::Users do
params: {
name: 'foo',
email: 'test@example.com',
- password: 'password',
+ password: User.random_password,
username: 'foo'
}
end.to change { User.count }.by(0)
@@ -1276,7 +1276,7 @@ RSpec.describe API::Users do
params: {
name: 'foo',
email: 'foo@example.com',
- password: 'password',
+ password: User.random_password,
username: 'test'
}
end.to change { User.count }.by(0)
@@ -1290,7 +1290,7 @@ RSpec.describe API::Users do
params: {
name: 'foo',
email: 'foo@example.com',
- password: 'password',
+ password: User.random_password,
username: 'TEST'
}
end.to change { User.count }.by(0)
@@ -1710,8 +1710,8 @@ RSpec.describe API::Users do
context "with existing user" do
before do
- post api("/users", admin), params: { email: 'test@example.com', password: 'password', username: 'test', name: 'test' }
- post api("/users", admin), params: { email: 'foo@bar.com', password: 'password', username: 'john', name: 'john' }
+ post api("/users", admin), params: { email: 'test@example.com', password: User.random_password, username: 'test', name: 'test' }
+ post api("/users", admin), params: { email: 'foo@bar.com', password: User.random_password, username: 'john', name: 'john' }
@user = User.all.last
end
diff --git a/spec/requests/git_http_spec.rb b/spec/requests/git_http_spec.rb
index 05b16119a0e..3ffca7e3c62 100644
--- a/spec/requests/git_http_spec.rb
+++ b/spec/requests/git_http_spec.rb
@@ -225,7 +225,7 @@ RSpec.describe 'Git HTTP requests' do
end
context 'when namespace exists' do
- let(:path) { "#{user.namespace.path}/new-project.git"}
+ let(:path) { "#{user.namespace.path}/new-project.git" }
context 'when authenticated' do
it 'creates a new project under the existing namespace' do
diff --git a/spec/requests/groups/milestones_controller_spec.rb b/spec/requests/groups/milestones_controller_spec.rb
index 43f0fc714b3..e6418c7694d 100644
--- a/spec/requests/groups/milestones_controller_spec.rb
+++ b/spec/requests/groups/milestones_controller_spec.rb
@@ -30,7 +30,7 @@ RSpec.describe Groups::MilestonesController do
milestones = json_response
expect(milestones.count).to eq(3)
- expect(milestones.map {|x| x['title']}).not_to include(private_milestone.title)
+ expect(milestones.map { |x| x['title'] }).not_to include(private_milestone.title)
end
end
diff --git a/spec/requests/jira_connect/subscriptions_controller_spec.rb b/spec/requests/jira_connect/subscriptions_controller_spec.rb
index b10d07b3771..d8f329f13f5 100644
--- a/spec/requests/jira_connect/subscriptions_controller_spec.rb
+++ b/spec/requests/jira_connect/subscriptions_controller_spec.rb
@@ -18,12 +18,12 @@ RSpec.describe JiraConnect::SubscriptionsController do
subject(:content_security_policy) { response.headers['Content-Security-Policy'] }
- it { is_expected.to include('http://self-managed-gitlab.com/-/jira_connect/oauth_application_ids')}
+ it { is_expected.to include('http://self-managed-gitlab.com/-/jira_connect/oauth_application_ids') }
context 'with no self-managed instance configured' do
let_it_be(:installation) { create(:jira_connect_installation, instance_url: '') }
- it { is_expected.not_to include('http://self-managed-gitlab.com')}
+ it { is_expected.not_to include('http://self-managed-gitlab.com') }
end
end
end
diff --git a/spec/requests/jwt_controller_spec.rb b/spec/requests/jwt_controller_spec.rb
index 70097234762..db3be617a53 100644
--- a/spec/requests/jwt_controller_spec.rb
+++ b/spec/requests/jwt_controller_spec.rb
@@ -22,6 +22,17 @@ RSpec.describe JwtController do
end
end
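+ # A personal access token that expires today should already be rejected as expired.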
+ shared_examples 'a token that expires today' do
+ let(:pat) { create(:personal_access_token, user: user, scopes: ['api'], expires_at: Date.today) }
+ let(:headers) { { authorization: credentials('personal_access_token', pat.token) } }
+
+ it 'fails authentication' do
+ get '/jwt/auth', params: parameters, headers: headers
+
+ expect(response).to have_gitlab_http_status(:unauthorized)
+ end
+ end
+
context 'authenticating against container registry' do
context 'existing service' do
subject! { get '/jwt/auth', params: parameters }
@@ -104,6 +115,7 @@ RSpec.describe JwtController do
it_behaves_like 'rejecting a blocked user'
it_behaves_like 'user logging'
+ it_behaves_like 'a token that expires today'
end
end
@@ -253,6 +265,7 @@ RSpec.describe JwtController do
let(:credential_password) { personal_access_token.token }
it_behaves_like 'with valid credentials'
+ it_behaves_like 'a token that expires today'
end
context 'with user credentials token' do
diff --git a/spec/requests/lfs_http_spec.rb b/spec/requests/lfs_http_spec.rb
index acf83916f82..3529239a4d9 100644
--- a/spec/requests/lfs_http_spec.rb
+++ b/spec/requests/lfs_http_spec.rb
@@ -129,13 +129,13 @@ RSpec.describe 'Git LFS API and storage' do
it_behaves_like 'LFS http 200 blob response'
context 'when user password is expired' do
- let_it_be(:user) { create(:user, password_expires_at: 1.minute.ago)}
+ let_it_be(:user) { create(:user, password_expires_at: 1.minute.ago) }
it_behaves_like 'LFS http 401 response'
end
context 'when user is blocked' do
- let_it_be(:user) { create(:user, :blocked)}
+ let_it_be(:user) { create(:user, :blocked) }
it_behaves_like 'LFS http 401 response'
end
@@ -347,17 +347,17 @@ RSpec.describe 'Git LFS API and storage' do
end
context 'when user password is expired' do
- let_it_be(:user) { create(:user, password_expires_at: 1.minute.ago)}
+ let_it_be(:user) { create(:user, password_expires_at: 1.minute.ago) }
- let(:role) { :reporter}
+ let(:role) { :reporter }
it_behaves_like 'LFS http 401 response'
end
context 'when user is blocked' do
- let_it_be(:user) { create(:user, :blocked)}
+ let_it_be(:user) { create(:user, :blocked) }
- let(:role) { :reporter}
+ let(:role) { :reporter }
it_behaves_like 'LFS http 401 response'
end
@@ -1013,7 +1013,7 @@ RSpec.describe 'Git LFS API and storage' do
end
context 'when user is blocked' do
- let_it_be(:user) { create(:user, :blocked)}
+ let_it_be(:user) { create(:user, :blocked) }
it_behaves_like 'LFS http 401 response'
end
diff --git a/spec/requests/oauth/tokens_controller_spec.rb b/spec/requests/oauth/tokens_controller_spec.rb
index 3895304dbde..e4cb28cc42b 100644
--- a/spec/requests/oauth/tokens_controller_spec.rb
+++ b/spec/requests/oauth/tokens_controller_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe Oauth::TokensController do
let(:cors_request_headers) { { 'Origin' => 'http://notgitlab.com' } }
let(:other_headers) { {} }
- let(:headers) { cors_request_headers.merge(other_headers)}
+ let(:headers) { cors_request_headers.merge(other_headers) }
let(:allowed_methods) { 'POST, OPTIONS' }
shared_examples 'cross-origin POST request' do
diff --git a/spec/requests/projects/cycle_analytics_events_spec.rb b/spec/requests/projects/cycle_analytics_events_spec.rb
index 89d46b64311..65540f86d34 100644
--- a/spec/requests/projects/cycle_analytics_events_spec.rb
+++ b/spec/requests/projects/cycle_analytics_events_spec.rb
@@ -3,13 +3,15 @@
require 'spec_helper'
RSpec.describe 'value stream analytics events' do
+ include CycleAnalyticsHelpers
+
let(:user) { create(:user) }
let(:project) { create(:project, :repository, public_builds: false) }
let(:issue) { create(:issue, project: project, created_at: 2.days.ago) }
describe 'GET /:namespace/:project/value_stream_analytics/events/issues' do
- let(:first_issue_iid) { project.issues.sort_by_attribute(:created_desc).pluck(:iid).first.to_s }
- let(:first_mr_iid) { project.merge_requests.sort_by_attribute(:created_desc).pluck(:iid).first.to_s }
+ let(:first_issue_iid) { project.issues.sort_by_attribute(:created_desc).pick(:iid).to_s }
+ let(:first_mr_iid) { project.merge_requests.sort_by_attribute(:created_desc).pick(:iid).to_s }
before do
project.add_developer(user)
diff --git a/spec/requests/projects/merge_requests/diffs_spec.rb b/spec/requests/projects/merge_requests/diffs_spec.rb
index e17be1ff984..937b0f1d713 100644
--- a/spec/requests/projects/merge_requests/diffs_spec.rb
+++ b/spec/requests/projects/merge_requests/diffs_spec.rb
@@ -13,8 +13,6 @@ RSpec.describe 'Merge Requests Diffs' do
end
describe 'GET diffs_batch' do
- let(:headers) { {} }
-
shared_examples_for 'serializes diffs with expected arguments' do
it 'serializes paginated merge request diff collection' do
expect_next_instance_of(PaginatedDiffSerializer) do |instance|
@@ -24,6 +22,8 @@ RSpec.describe 'Merge Requests Diffs' do
end
subject
+
+ expect(response).to have_gitlab_http_status(:success)
end
end
@@ -40,7 +40,7 @@ RSpec.describe 'Merge Requests Diffs' do
}
end
- def go(extra_params = {})
+ def go(headers: {}, **extra_params)
params = {
namespace_id: project.namespace.to_param,
project_id: project,
@@ -54,13 +54,15 @@ RSpec.describe 'Merge Requests Diffs' do
end
context 'with caching', :use_clean_rails_memory_store_caching do
- subject { go(page: 0, per_page: 5) }
+ subject { go(headers: headers, page: 0, per_page: 5) }
+
+ let(:headers) { {} }
context 'when the request has not been cached' do
- it_behaves_like 'serializes diffs with expected arguments' do
- let(:collection) { Gitlab::Diff::FileCollection::MergeRequestDiffBatch }
- let(:expected_options) { collection_arguments(total_pages: 20) }
- end
+ let(:collection) { Gitlab::Diff::FileCollection::MergeRequestDiffBatch }
+ let(:expected_options) { collection_arguments(total_pages: 20) }
+
+ it_behaves_like 'serializes diffs with expected arguments'
end
context 'when the request has already been cached' do
@@ -76,21 +78,61 @@ RSpec.describe 'Merge Requests Diffs' do
subject
end
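+ # Replays the ETag from the initial cached request via If-None-Match; with the
+ # feature flag enabled the endpoint responds 304, otherwise diffs are re-rendered.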
+ context 'when using ETags' do
+ context 'when etag_merge_request_diff_batches is true' do
+ let(:headers) { { 'If-None-Match' => response.etag } }
+
+ it 'does not serialize diffs' do
+ expect(PaginatedDiffSerializer).not_to receive(:new)
+
+ go(headers: headers, page: 0, per_page: 5)
+
+ expect(response).to have_gitlab_http_status(:not_modified)
+ end
+ end
+
+ context 'when etag_merge_request_diff_batches is false' do
+ let(:headers) { { 'If-None-Match' => response.etag } }
+
+ before do
+ stub_feature_flags(etag_merge_request_diff_batches: false)
+ end
+
+ it 'does not serialize diffs' do
+ expect_next_instance_of(PaginatedDiffSerializer) do |instance|
+ expect(instance).not_to receive(:represent)
+ end
+
+ subject
+
+ expect(response).to have_gitlab_http_status(:success)
+ end
+ end
+ end
+
context 'with the different user' do
let(:another_user) { create(:user) }
+ let(:collection) { Gitlab::Diff::FileCollection::MergeRequestDiffBatch }
+ let(:expected_options) { collection_arguments(total_pages: 20) }
before do
project.add_maintainer(another_user)
sign_in(another_user)
end
- it_behaves_like 'serializes diffs with expected arguments' do
- let(:collection) { Gitlab::Diff::FileCollection::MergeRequestDiffBatch }
- let(:expected_options) { collection_arguments(total_pages: 20) }
+ it_behaves_like 'serializes diffs with expected arguments'
+
+ context 'when using ETag caching' do
+ it_behaves_like 'serializes diffs with expected arguments' do
+ let(:headers) { { 'If-None-Match' => response.etag } }
+ end
end
end
context 'with a new unfoldable diff position' do
+ let(:collection) { Gitlab::Diff::FileCollection::MergeRequestDiffBatch }
+ let(:expected_options) { collection_arguments(total_pages: 20) }
+
let(:unfoldable_position) do
create(:diff_position)
end
@@ -103,80 +145,155 @@ RSpec.describe 'Merge Requests Diffs' do
end
end
- it_behaves_like 'serializes diffs with expected arguments' do
- let(:collection) { Gitlab::Diff::FileCollection::MergeRequestDiffBatch }
- let(:expected_options) { collection_arguments(total_pages: 20) }
+ it_behaves_like 'serializes diffs with expected arguments'
+
+ context 'when using ETag caching' do
+ it_behaves_like 'serializes diffs with expected arguments' do
+ let(:headers) { { 'If-None-Match' => response.etag } }
+ end
end
end
context 'with disabled display_merge_conflicts_in_diff feature' do
+ let(:collection) { Gitlab::Diff::FileCollection::MergeRequestDiffBatch }
+ let(:expected_options) { collection_arguments(total_pages: 20).merge(allow_tree_conflicts: false) }
+
before do
stub_feature_flags(display_merge_conflicts_in_diff: false)
end
- it_behaves_like 'serializes diffs with expected arguments' do
- let(:collection) { Gitlab::Diff::FileCollection::MergeRequestDiffBatch }
- let(:expected_options) { collection_arguments(total_pages: 20).merge(allow_tree_conflicts: false) }
+ it_behaves_like 'serializes diffs with expected arguments'
+
+ context 'when using ETag caching' do
+ it_behaves_like 'serializes diffs with expected arguments' do
+ let(:headers) { { 'If-None-Match' => response.etag } }
+ end
end
end
context 'with diff_head option' do
subject { go(page: 0, per_page: 5, diff_head: true) }
+ let(:collection) { Gitlab::Diff::FileCollection::MergeRequestDiffBatch }
+ let(:expected_options) { collection_arguments(total_pages: 20).merge(merge_ref_head_diff: true) }
+
before do
merge_request.create_merge_head_diff!
end
- it_behaves_like 'serializes diffs with expected arguments' do
- let(:collection) { Gitlab::Diff::FileCollection::MergeRequestDiffBatch }
- let(:expected_options) { collection_arguments(total_pages: 20).merge(merge_ref_head_diff: true) }
+ it_behaves_like 'serializes diffs with expected arguments'
+
+ context 'when using ETag caching' do
+ it_behaves_like 'serializes diffs with expected arguments' do
+ let(:headers) { { 'If-None-Match' => response.etag } }
+ end
end
end
context 'with the different pagination option' do
subject { go(page: 5, per_page: 5) }
- it_behaves_like 'serializes diffs with expected arguments' do
- let(:collection) { Gitlab::Diff::FileCollection::MergeRequestDiffBatch }
- let(:expected_options) { collection_arguments(total_pages: 20) }
+ let(:collection) { Gitlab::Diff::FileCollection::MergeRequestDiffBatch }
+ let(:expected_options) { collection_arguments(total_pages: 20) }
+
+ it_behaves_like 'serializes diffs with expected arguments'
+
+ context 'when using ETag caching' do
+ it_behaves_like 'serializes diffs with expected arguments' do
+ let(:headers) { { 'If-None-Match' => response.etag } }
+ end
end
end
context 'with the different diff_view' do
subject { go(page: 0, per_page: 5, view: :parallel) }
- it_behaves_like 'serializes diffs with expected arguments' do
- let(:collection) { Gitlab::Diff::FileCollection::MergeRequestDiffBatch }
- let(:expected_options) { collection_arguments(total_pages: 20).merge(diff_view: :parallel) }
+ let(:collection) { Gitlab::Diff::FileCollection::MergeRequestDiffBatch }
+ let(:expected_options) { collection_arguments(total_pages: 20).merge(diff_view: :parallel) }
+
+ it_behaves_like 'serializes diffs with expected arguments'
+
+ context 'when using ETag caching' do
+ it_behaves_like 'serializes diffs with expected arguments' do
+ let(:headers) { { 'If-None-Match' => response.etag } }
+ end
end
end
context 'with the different expanded option' do
subject { go(page: 0, per_page: 5, expanded: true ) }
- it_behaves_like 'serializes diffs with expected arguments' do
- let(:collection) { Gitlab::Diff::FileCollection::MergeRequestDiffBatch }
- let(:expected_options) { collection_arguments(total_pages: 20) }
+ let(:collection) { Gitlab::Diff::FileCollection::MergeRequestDiffBatch }
+ let(:expected_options) { collection_arguments(total_pages: 20) }
+
+ it_behaves_like 'serializes diffs with expected arguments'
+
+ context 'when using ETag caching' do
+ it_behaves_like 'serializes diffs with expected arguments' do
+ let(:headers) { { 'If-None-Match' => response.etag } }
+ end
end
end
context 'with the different ignore_whitespace_change option' do
subject { go(page: 0, per_page: 5, w: 1) }
- it_behaves_like 'serializes diffs with expected arguments' do
- let(:collection) { Gitlab::Diff::FileCollection::Compare }
- let(:expected_options) { collection_arguments(total_pages: 20) }
+ let(:collection) { Gitlab::Diff::FileCollection::Compare }
+ let(:expected_options) { collection_arguments(total_pages: 20) }
+
+ it_behaves_like 'serializes diffs with expected arguments'
+
+ context 'when using ETag caching' do
+ it_behaves_like 'serializes diffs with expected arguments' do
+ let(:headers) { { 'If-None-Match' => response.etag } }
+ end
end
end
end
context 'when the paths is given' do
- subject { go(page: 0, per_page: 5, paths: %w[README CHANGELOG]) }
+ subject { go(headers: headers, page: 0, per_page: 5, paths: %w[README CHANGELOG]) }
+
+ before do
+ go(page: 0, per_page: 5, paths: %w[README CHANGELOG])
+ end
- it 'does not use cache' do
- expect(Rails.cache).not_to receive(:fetch).with(/cache:gitlab:PaginatedDiffSerializer/).and_call_original
+ context 'when using ETag caching' do
+ let(:headers) { { 'If-None-Match' => response.etag } }
- subject
+ context 'when etag_merge_request_diff_batches is true' do
+ it 'does not serialize diffs' do
+ expect(PaginatedDiffSerializer).not_to receive(:new)
+
+ subject
+
+ expect(response).to have_gitlab_http_status(:not_modified)
+ end
+ end
+
+ context 'when etag_merge_request_diff_batches is false' do
+ before do
+ stub_feature_flags(etag_merge_request_diff_batches: false)
+ end
+
+ it 'does not use cache' do
+ expect(Rails.cache).not_to receive(:fetch).with(/cache:gitlab:PaginatedDiffSerializer/).and_call_original
+
+ subject
+
+ expect(response).to have_gitlab_http_status(:success)
+ end
+ end
+ end
+
+ context 'when not using ETag caching' do
+ it 'does not use cache' do
+ expect(Rails.cache).not_to receive(:fetch).with(/cache:gitlab:PaginatedDiffSerializer/).and_call_original
+
+ subject
+
+ expect(response).to have_gitlab_http_status(:success)
+ end
end
end
end
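
The ETag contexts above all follow the same warm-then-revalidate flow. A minimal sketch of that flow, assuming the spec's own `go` request helper and `response.etag` behave as in the surrounding examples (the exact options are illustrative):

# Warm the cache with a plain request, then revalidate with If-None-Match.
it 'returns 304 Not Modified for a matching ETag' do
  go(page: 0, per_page: 5)
  cached_etag = response.etag

  go(headers: { 'If-None-Match' => cached_etag }, page: 0, per_page: 5)

  expect(response).to have_gitlab_http_status(:not_modified)
end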
diff --git a/spec/requests/projects/merge_requests_discussions_spec.rb b/spec/requests/projects/merge_requests_discussions_spec.rb
index c761af86c16..9503dafcf2a 100644
--- a/spec/requests/projects/merge_requests_discussions_spec.rb
+++ b/spec/requests/projects/merge_requests_discussions_spec.rb
@@ -16,9 +16,16 @@ RSpec.describe 'merge requests discussions' do
login_as(user)
end
+ # rubocop:disable RSpec/InstanceVariable
def send_request
- get discussions_namespace_project_merge_request_path(namespace_id: project.namespace, project_id: project, id: merge_request.iid)
+ get(
+ discussions_namespace_project_merge_request_path(namespace_id: project.namespace, project_id: project, id: merge_request.iid),
+ headers: { 'If-None-Match' => @etag }
+ )
+
+ @etag = response.etag
end
+ # rubocop:enable RSpec/InstanceVariable
it 'returns 200' do
send_request
@@ -63,11 +70,6 @@ RSpec.describe 'merge requests discussions' do
let!(:award_emoji) { create(:award_emoji, awardable: first_note) }
let!(:author_membership) { project.add_maintainer(author) }
- before do
- # Make a request to cache the discussions
- send_request
- end
-
shared_examples 'cache miss' do
it 'does not hit a warm cache' do
expect_next_instance_of(DiscussionSerializer) do |serializer|
@@ -80,176 +82,195 @@ RSpec.describe 'merge requests discussions' do
end
end
- it 'gets cached on subsequent requests' do
- expect_next_instance_of(DiscussionSerializer) do |serializer|
- expect(serializer).not_to receive(:represent)
- end
+ shared_examples 'cache hit' do
+ it 'gets cached on subsequent requests' do
+ expect_next_instance_of(DiscussionSerializer) do |serializer|
+ expect(serializer).not_to receive(:represent)
+ end
- send_request
+ send_request
+ end
end
- context 'when a note in a discussion got updated' do
+ context 'when mr_discussions_http_cache and disabled_mr_discussions_redis_cache are enabled' do
before do
- first_note.update!(updated_at: 1.minute.from_now)
+ send_request
end
- it_behaves_like 'cache miss' do
- let(:changed_notes) { [first_note, second_note] }
- end
- end
+ it_behaves_like 'cache hit'
- context 'when a note in a discussion got its reference state updated' do
- before do
- reference.close!
- end
+ context 'when a note in a discussion got updated' do
+ before do
+ first_note.update!(updated_at: 1.minute.from_now)
+ end
- it_behaves_like 'cache miss' do
- let(:changed_notes) { [first_note, second_note] }
+ it_behaves_like 'cache miss' do
+ let(:changed_notes) { [first_note, second_note] }
+ end
end
- end
- context 'when a note in a discussion got resolved' do
- before do
- travel_to(1.minute.from_now) do
- first_note.resolve!(user)
+ context 'when a note in a discussion got its reference state updated' do
+ before do
+ reference.close!
end
- end
- it_behaves_like 'cache miss' do
- let(:changed_notes) { [first_note, second_note] }
+ it_behaves_like 'cache miss' do
+ let(:changed_notes) { [first_note, second_note] }
+ end
end
- end
- context 'when a note is added to a discussion' do
- let!(:third_note) { create(:diff_note_on_merge_request, in_reply_to: first_note, noteable: merge_request, project: project) }
+ context 'when a note in a discussion got resolved' do
+ before do
+ travel_to(1.minute.from_now) do
+ first_note.resolve!(user)
+ end
+ end
- it_behaves_like 'cache miss' do
- let(:changed_notes) { [first_note, second_note, third_note] }
+ it_behaves_like 'cache miss' do
+ let(:changed_notes) { [first_note, second_note] }
+ end
end
- end
- context 'when a note is removed from a discussion' do
- before do
- second_note.destroy!
- end
+ context 'when a note is added to a discussion' do
+ let!(:third_note) { create(:diff_note_on_merge_request, in_reply_to: first_note, noteable: merge_request, project: project) }
- it_behaves_like 'cache miss' do
- let(:changed_notes) { [first_note] }
+ it_behaves_like 'cache miss' do
+ let(:changed_notes) { [first_note, second_note, third_note] }
+ end
end
- end
- context 'when an emoji is awarded to a note in discussion' do
- before do
- travel_to(1.minute.from_now) do
- create(:award_emoji, awardable: first_note)
+ context 'when a note is removed from a discussion' do
+ before do
+ second_note.destroy!
end
- end
- it_behaves_like 'cache miss' do
- let(:changed_notes) { [first_note, second_note] }
+ it_behaves_like 'cache miss' do
+ let(:changed_notes) { [first_note] }
+ end
end
- end
- context 'when an award emoji is removed from a note in discussion' do
- before do
- travel_to(1.minute.from_now) do
- award_emoji.destroy!
+ context 'when an emoji is awarded to a note in discussion' do
+ before do
+ travel_to(1.minute.from_now) do
+ create(:award_emoji, awardable: first_note)
+ end
end
- end
- it_behaves_like 'cache miss' do
- let(:changed_notes) { [first_note, second_note] }
+ it_behaves_like 'cache miss' do
+ let(:changed_notes) { [first_note, second_note] }
+ end
end
- end
- context 'when the diff note position changes' do
- before do
- # This replicates a position change wherein timestamps aren't updated
- # which is why `Gitlab::Timeless.timeless` is utilized. This is the
- # same approach being used in Discussions::UpdateDiffPositionService
- # which is responsible for updating the positions of diff discussions
- # when MR updates.
- first_note.position = Gitlab::Diff::Position.new(
- old_path: first_note.position.old_path,
- new_path: first_note.position.new_path,
- old_line: first_note.position.old_line,
- new_line: first_note.position.new_line + 1,
- diff_refs: first_note.position.diff_refs
- )
-
- Gitlab::Timeless.timeless(first_note, &:save)
- end
+ context 'when an award emoji is removed from a note in discussion' do
+ before do
+ travel_to(1.minute.from_now) do
+ award_emoji.destroy!
+ end
+ end
- it_behaves_like 'cache miss' do
- let(:changed_notes) { [first_note, second_note] }
+ it_behaves_like 'cache miss' do
+ let(:changed_notes) { [first_note, second_note] }
+ end
end
- end
- context 'when the HEAD diff note position changes' do
- before do
- # This replicates a DiffNotePosition change. This is the same approach
- # being used in Discussions::CaptureDiffNotePositionService which is
- # responsible for updating/creating DiffNotePosition of a diff discussions
- # in relation to HEAD diff.
- new_position = Gitlab::Diff::Position.new(
- old_path: first_note.position.old_path,
- new_path: first_note.position.new_path,
- old_line: first_note.position.old_line,
- new_line: first_note.position.new_line + 1,
- diff_refs: first_note.position.diff_refs
- )
-
- DiffNotePosition.create_or_update_for(
- first_note,
- diff_type: :head,
- position: new_position,
- line_code: 'bd4b7bfff3a247ccf6e3371c41ec018a55230bcc_534_521'
- )
- end
+ context 'when the diff note position changes' do
+ before do
+ # This replicates a position change wherein timestamps aren't updated
+ # which is why `Gitlab::Timeless.timeless` is utilized. This is the
+ # same approach being used in Discussions::UpdateDiffPositionService
+ # which is responsible for updating the positions of diff discussions
+ # when the MR is updated.
+ first_note.position = Gitlab::Diff::Position.new(
+ old_path: first_note.position.old_path,
+ new_path: first_note.position.new_path,
+ old_line: first_note.position.old_line,
+ new_line: first_note.position.new_line + 1,
+ diff_refs: first_note.position.diff_refs
+ )
+
+ Gitlab::Timeless.timeless(first_note, &:save)
+ end
- it_behaves_like 'cache miss' do
- let(:changed_notes) { [first_note, second_note] }
+ it_behaves_like 'cache miss' do
+ let(:changed_notes) { [first_note, second_note] }
+ end
end
- end
- context 'when author detail changes' do
- before do
- author.update!(name: "#{author.name} (Updated)")
- end
+ context 'when the HEAD diff note position changes' do
+ before do
+ # This replicates a DiffNotePosition change. This is the same approach
+ # being used in Discussions::CaptureDiffNotePositionService which is
+ # responsible for updating/creating the DiffNotePosition of a diff discussion
+ # in relation to the HEAD diff.
+ new_position = Gitlab::Diff::Position.new(
+ old_path: first_note.position.old_path,
+ new_path: first_note.position.new_path,
+ old_line: first_note.position.old_line,
+ new_line: first_note.position.new_line + 1,
+ diff_refs: first_note.position.diff_refs
+ )
+
+ DiffNotePosition.create_or_update_for(
+ first_note,
+ diff_type: :head,
+ position: new_position,
+ line_code: 'bd4b7bfff3a247ccf6e3371c41ec018a55230bcc_534_521'
+ )
+ end
- it_behaves_like 'cache miss' do
- let(:changed_notes) { [first_note, second_note] }
+ it_behaves_like 'cache miss' do
+ let(:changed_notes) { [first_note, second_note] }
+ end
end
- end
- context 'when author status changes' do
- before do
- Users::SetStatusService.new(author, message: "updated status").execute
+ context 'when author detail changes' do
+ before do
+ author.update!(name: "#{author.name} (Updated)")
+ end
+
+ it_behaves_like 'cache miss' do
+ let(:changed_notes) { [first_note, second_note] }
+ end
end
- it_behaves_like 'cache miss' do
- let(:changed_notes) { [first_note, second_note] }
+ context 'when author status changes' do
+ before do
+ Users::SetStatusService.new(author, message: "updated status").execute
+ end
+
+ it_behaves_like 'cache miss' do
+ let(:changed_notes) { [first_note, second_note] }
+ end
end
- end
- context 'when author role changes' do
- before do
- Members::UpdateService.new(owner, access_level: Gitlab::Access::GUEST).execute(author_membership)
+ context 'when author role changes' do
+ before do
+ Members::UpdateService.new(owner, access_level: Gitlab::Access::GUEST).execute(author_membership)
+ end
+
+ it_behaves_like 'cache miss' do
+ let(:changed_notes) { [first_note, second_note] }
+ end
end
- it_behaves_like 'cache miss' do
- let(:changed_notes) { [first_note, second_note] }
+ context 'when current_user role changes' do
+ before do
+ Members::UpdateService.new(owner, access_level: Gitlab::Access::GUEST).execute(project.member(user))
+ end
+
+ it_behaves_like 'cache miss' do
+ let(:changed_notes) { [first_note, second_note] }
+ end
end
end
- context 'when current_user role changes' do
+ context 'when disabled_mr_discussions_redis_cache is disabled' do
before do
- Members::UpdateService.new(owner, access_level: Gitlab::Access::GUEST).execute(project.member(user))
+ stub_feature_flags(disabled_mr_discussions_redis_cache: false)
+ send_request
end
- it_behaves_like 'cache miss' do
- let(:changed_notes) { [first_note, second_note] }
- end
+ it_behaves_like 'cache hit'
end
end
end
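
The restructured discussions spec funnels every scenario through the 'cache hit' and 'cache miss' shared examples, so a new caching scenario only needs a `before` block and an `it_behaves_like` call. A hypothetical sketch in the same style (this flag combination is not part of the diff; it assumes the fallback path behaves like the Redis-cache context above):

# Hypothetical extra scenario: HTTP caching off, Redis caching still warms on the
# first request and is hit on the second.
context 'when mr_discussions_http_cache is disabled' do
  before do
    stub_feature_flags(mr_discussions_http_cache: false)
    send_request
  end

  it_behaves_like 'cache hit'
end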
diff --git a/spec/requests/projects/settings/packages_and_registries_controller_spec.rb b/spec/requests/projects/settings/packages_and_registries_controller_spec.rb
new file mode 100644
index 00000000000..6d8a152c769
--- /dev/null
+++ b/spec/requests/projects/settings/packages_and_registries_controller_spec.rb
@@ -0,0 +1,70 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Projects::Settings::PackagesAndRegistriesController do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project, reload: true) { create(:project, namespace: user.namespace) }
+
+ let(:container_registry_enabled) { true }
+ let(:container_registry_enabled_on_project) { ProjectFeature::ENABLED }
+
+ before do
+ project.project_feature.update!(container_registry_access_level: container_registry_enabled_on_project)
+ project.container_expiration_policy.update!(enabled: true)
+
+ stub_container_registry_config(enabled: container_registry_enabled)
+ end
+
+ describe 'GET #cleanup_tags' do
+ subject { get cleanup_image_tags_namespace_project_settings_packages_and_registries_path(user.namespace, project) }
+
+ context 'when user is unauthorized' do
+ let_it_be(:user) { create(:user) }
+
+ before do
+ project.add_reporter(user)
+ sign_in(user)
+ subject
+ end
+
+ it 'shows 404' do
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+
+ context 'when user is authorized' do
+ let(:user) { project.creator }
+
+ before do
+ sign_in(user)
+ subject
+ end
+
+ it 'renders content' do
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to render_template(:cleanup_tags)
+ end
+
+ context 'when registry is disabled' do
+ let(:container_registry_enabled) { false }
+
+ it 'shows 404' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+
+ context 'when container registry is disabled on project' do
+ let(:container_registry_enabled_on_project) { ProjectFeature::DISABLED }
+
+ it 'shows 404' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+ end
+ end
+end
diff --git a/spec/requests/rack_attack_global_spec.rb b/spec/requests/rack_attack_global_spec.rb
index 115f78a5600..f6b9bc527ac 100644
--- a/spec/requests/rack_attack_global_spec.rb
+++ b/spec/requests/rack_attack_global_spec.rb
@@ -104,8 +104,8 @@ RSpec.describe 'Rack Attack global throttles', :use_clean_rails_memory_store_cac
end
context 'with the token in the OAuth headers' do
- let(:request_args) { api_get_args_with_token_headers(api_partial_url, oauth_token_headers(token)) }
- let(:other_user_request_args) { api_get_args_with_token_headers(api_partial_url, oauth_token_headers(other_user_token)) }
+ let(:request_args) { api_get_args_with_token_headers(api_partial_url, bearer_headers(token)) }
+ let(:other_user_request_args) { api_get_args_with_token_headers(api_partial_url, bearer_headers(other_user_token)) }
it_behaves_like 'rate-limited user based token-authenticated requests'
end
@@ -131,8 +131,8 @@ RSpec.describe 'Rack Attack global throttles', :use_clean_rails_memory_store_cac
end
context 'with the token in the OAuth headers' do
- let(:request_args) { api_get_args_with_token_headers(api_partial_url, oauth_token_headers(token)) }
- let(:other_user_request_args) { api_get_args_with_token_headers(api_partial_url, oauth_token_headers(other_user_token)) }
+ let(:request_args) { api_get_args_with_token_headers(api_partial_url, bearer_headers(token)) }
+ let(:other_user_request_args) { api_get_args_with_token_headers(api_partial_url, bearer_headers(other_user_token)) }
it_behaves_like 'rate-limited user based token-authenticated requests'
end
@@ -1189,7 +1189,7 @@ RSpec.describe 'Rack Attack global throttles', :use_clean_rails_memory_store_cac
it 'request is authenticated by token in the OAuth headers' do
expect_authenticated_request
- get url, headers: oauth_token_headers(personal_access_token)
+ get url, headers: bearer_headers(personal_access_token)
end
it 'request is authenticated by token in basic auth' do
@@ -1206,7 +1206,7 @@ RSpec.describe 'Rack Attack global throttles', :use_clean_rails_memory_store_cac
it 'request is authenticated by token in query string' do
expect_authenticated_request
- get url, params: { access_token: oauth_token.token }
+ get url, params: { access_token: oauth_token.plaintext_token }
end
it 'request is authenticated by token in the headers' do
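
The rack-attack spec swaps `oauth_token_headers` for `bearer_headers`. A sketch of what such a helper typically returns, assuming it simply builds an RFC 6750 Authorization header from the token; the real definition lives in the shared request helpers and is not part of this diff:

# Assumed shape of the helper used above; treat it as illustrative only.
def bearer_headers(token)
  token_value = token.respond_to?(:plaintext_token) ? token.plaintext_token : token.token
  { 'Authorization' => "Bearer #{token_value}" }
end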
diff --git a/spec/requests/users/namespace_callouts_spec.rb b/spec/requests/users/namespace_callouts_spec.rb
new file mode 100644
index 00000000000..5a4e269eefb
--- /dev/null
+++ b/spec/requests/users/namespace_callouts_spec.rb
@@ -0,0 +1,57 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Namespace callouts' do
+ let_it_be(:user) { create(:user) }
+
+ before do
+ sign_in(user)
+ end
+
+ describe 'POST /-/users/namespace_callouts' do
+ let(:params) { { feature_name: feature_name, namespace_id: user.namespace.id } }
+
+ subject { post namespace_callouts_path, params: params, headers: { 'ACCEPT' => 'application/json' } }
+
+ context 'with valid feature name and group' do
+ let(:feature_name) { Users::NamespaceCallout.feature_names.each_key.first }
+
+ context 'when callout entry does not exist' do
+ it 'creates a callout entry with dismissed state' do
+ expect { subject }.to change { Users::NamespaceCallout.count }.by(1)
+ end
+
+ it 'returns success' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+ end
+
+ context 'when callout entry already exists' do
+ let!(:callout) do
+ create(:namespace_callout,
+ feature_name: Users::GroupCallout.feature_names.each_key.first,
+ user: user,
+ namespace: user.namespace)
+ end
+
+ it 'returns success', :aggregate_failures do
+ expect { subject }.not_to change { Users::NamespaceCallout.count }
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+ end
+ end
+
+ context 'with invalid feature name' do
+ let(:feature_name) { 'bogus_feature_name' }
+
+ it 'returns bad request' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ end
+ end
+ end
+end
diff --git a/spec/requests/users/project_callouts_spec.rb b/spec/requests/users/project_callouts_spec.rb
new file mode 100644
index 00000000000..98c00fef052
--- /dev/null
+++ b/spec/requests/users/project_callouts_spec.rb
@@ -0,0 +1,58 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Project callouts' do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project) }
+
+ before do
+ sign_in(user)
+ end
+
+ describe 'POST /-/users/project_callouts' do
+ let(:params) { { feature_name: feature_name, project_id: project.id } }
+
+ subject { post project_callouts_path, params: params, headers: { 'ACCEPT' => 'application/json' } }
+
+ context 'with valid feature name and project' do
+ let(:feature_name) { Users::ProjectCallout.feature_names.each_key.first }
+
+ context 'when callout entry does not exist' do
+ it 'creates a callout entry with dismissed state' do
+ expect { subject }.to change { Users::ProjectCallout.count }.by(1)
+ end
+
+ it 'returns success' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+ end
+
+ context 'when callout entry already exists' do
+ let!(:callout) do
+ create(:project_callout,
+ feature_name: Users::ProjectCallout.feature_names.each_key.first,
+ user: user,
+ project: project)
+ end
+
+ it 'returns success', :aggregate_failures do
+ expect { subject }.not_to change { Users::ProjectCallout.count }
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+ end
+ end
+
+ context 'with invalid feature name' do
+ let(:feature_name) { 'bogus_feature_name' }
+
+ it 'returns bad request' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ end
+ end
+ end
+end
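
Both callout specs drive the same JSON endpoint; the request boils down to a single POST with a feature name and a scope id. For reference, the call the project variant issues, with params and path helper exactly as in the spec above:

# An unknown feature name yields 400; a known one creates (or finds) a
# Users::ProjectCallout and returns 200.
post project_callouts_path,
     params: { feature_name: 'bogus_feature_name', project_id: project.id },
     headers: { 'ACCEPT' => 'application/json' }

response.status # => 400 for the bogus name above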
diff --git a/spec/routing/project_routing_spec.rb b/spec/routing/project_routing_spec.rb
index 1d58a31bd6e..f701dd9c488 100644
--- a/spec/routing/project_routing_spec.rb
+++ b/spec/routing/project_routing_spec.rb
@@ -772,6 +772,16 @@ RSpec.describe 'project routing' do
end
end
+ describe Projects::Settings::PackagesAndRegistriesController, 'routing' do
+ it 'to #show' do
+ expect(get('/gitlab/gitlabhq/-/settings/packages_and_registries')).to route_to('projects/settings/packages_and_registries#show', namespace_id: 'gitlab', project_id: 'gitlabhq')
+ end
+
+ it 'to #cleanup_tags' do
+ expect(get('/gitlab/gitlabhq/-/settings/packages_and_registries/cleanup_image_tags')).to route_to('projects/settings/packages_and_registries#cleanup_tags', namespace_id: 'gitlab', project_id: 'gitlabhq')
+ end
+ end
+
describe Projects::Settings::IntegrationsController, 'routing' do
it 'to #index' do
expect(get('/gitlab/gitlabhq/-/settings/integrations')).to route_to('projects/settings/integrations#index', namespace_id: 'gitlab', project_id: 'gitlabhq')
@@ -953,12 +963,6 @@ RSpec.describe 'project routing' do
end
end
- describe Projects::Ci::SecureFilesController, 'routing' do
- it 'to #show' do
- expect(get('/gitlab/gitlabhq/-/ci/secure_files')).to route_to('projects/ci/secure_files#show', namespace_id: 'gitlab', project_id: 'gitlabhq')
- end
- end
-
context 'with a non-existent project' do
it 'routes to 404 with get request' do
expect(get: "/gitlab/not_exist").to route_to(
diff --git a/spec/rubocop/code_reuse_helpers_spec.rb b/spec/rubocop/code_reuse_helpers_spec.rb
index d437ada85ee..0d06d37d67a 100644
--- a/spec/rubocop/code_reuse_helpers_spec.rb
+++ b/spec/rubocop/code_reuse_helpers_spec.rb
@@ -152,6 +152,26 @@ RSpec.describe RuboCop::CodeReuseHelpers do
end
end
+ describe '#in_graphql?' do
+ it 'returns true for a node in the FOSS GraphQL directory' do
+ node = build_and_parse_source('10', rails_root_join('app', 'graphql', 'foo.rb'))
+
+ expect(cop.in_graphql?(node)).to eq(true)
+ end
+
+ it 'returns true for a node in the EE GraphQL directory' do
+ node = build_and_parse_source('10', rails_root_join('ee', 'app', 'graphql', 'foo.rb'))
+
+ expect(cop.in_graphql?(node)).to eq(true)
+ end
+
+ it 'returns false for a node outside the GraphQL directory' do
+ node = build_and_parse_source('10', rails_root_join('app', 'foo', 'foo.rb'))
+
+ expect(cop.in_graphql?(node)).to eq(false)
+ end
+ end
+
describe '#in_graphql_types?' do
%w[
app/graphql/types
@@ -169,7 +189,7 @@ RSpec.describe RuboCop::CodeReuseHelpers do
app/graphql/resolvers
app/foo
].each do |path|
- it "returns true for a node in #{path}" do
+ it "returns false for a node in #{path}" do
node = build_and_parse_source('10', rails_root_join(path, 'foo.rb'))
expect(cop.in_graphql_types?(node)).to eq(false)
@@ -255,6 +275,44 @@ RSpec.describe RuboCop::CodeReuseHelpers do
end
end
+ describe '#in_graphql_directory?' do
+ it 'returns true for a directory in the FOSS app/graphql directory' do
+ node = build_and_parse_source('10', rails_root_join('app', 'graphql', 'subdir', 'foo.rb'))
+
+ expect(cop.in_graphql_directory?(node, 'subdir')).to eq(true)
+ end
+
+ it 'returns true for a directory in the EE app/graphql directory' do
+ node = build_and_parse_source('10', rails_root_join('ee', 'app', 'graphql', 'subdir', 'foo.rb'))
+
+ expect(cop.in_graphql_directory?(node, 'subdir')).to eq(true)
+ end
+
+ it 'returns true for a directory in the EE app/graphql/ee directory' do
+ node = build_and_parse_source('10', rails_root_join('ee', 'app', 'graphql', 'ee', 'subdir', 'foo.rb'))
+
+ expect(cop.in_graphql_directory?(node, 'subdir')).to eq(true)
+ end
+
+ it 'returns false for a different directory in the FOSS app/graphql directory' do
+ node = build_and_parse_source('10', rails_root_join('app', 'graphql', 'anotherdir', 'foo.rb'))
+
+ expect(cop.in_graphql_directory?(node, 'subdir')).to eq(false)
+ end
+
+ it 'returns false for a different directory in the EE app/graphql directory' do
+ node = build_and_parse_source('10', rails_root_join('ee', 'app', 'graphql', 'anotherdir', 'foo.rb'))
+
+ expect(cop.in_graphql_directory?(node, 'subdir')).to eq(false)
+ end
+
+ it 'returns false for a different directory in the EE app/graphql/ee directory' do
+ node = build_and_parse_source('10', rails_root_join('ee', 'app', 'graphql', 'ee', 'anotherdir', 'foo.rb'))
+
+ expect(cop.in_graphql_directory?(node, 'subdir')).to eq(false)
+ end
+ end
+
describe '#name_of_receiver' do
it 'returns the name of a send receiver' do
node = build_and_parse_source('Foo.bar')
diff --git a/spec/rubocop/cop/code_reuse/worker_spec.rb b/spec/rubocop/cop/code_reuse/worker_spec.rb
index 8155791a3e3..a548e90d8e1 100644
--- a/spec/rubocop/cop/code_reuse/worker_spec.rb
+++ b/spec/rubocop/cop/code_reuse/worker_spec.rb
@@ -31,7 +31,24 @@ RSpec.describe RuboCop::Cop::CodeReuse::Worker do
resource :projects do
get '/' do
FooWorker.perform_async
- ^^^^^^^^^^^^^^^^^^^^^^^ Workers can not be used in a Grape API.
+ ^^^^^^^^^^^^^^^^^^^^^^^ Workers can not be used in an API endpoint.
+ end
+ end
+ end
+ SOURCE
+ end
+
+ it 'flags the use of a worker in GraphQL' do
+ allow(cop)
+ .to receive(:in_graphql?)
+ .and_return(true)
+
+ expect_offense(<<~SOURCE)
+ module Mutations
+ class Foo < BaseMutation
+ def resolve
+ FooWorker.perform_async
+ ^^^^^^^^^^^^^^^^^^^^^^^ Workers can not be used in an API endpoint.
end
end
end
diff --git a/spec/rubocop/cop/gemspec/avoid_executing_git_spec.rb b/spec/rubocop/cop/gemspec/avoid_executing_git_spec.rb
new file mode 100644
index 00000000000..f94a990a2f7
--- /dev/null
+++ b/spec/rubocop/cop/gemspec/avoid_executing_git_spec.rb
@@ -0,0 +1,30 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+require_relative '../../../../rubocop/cop/gemspec/avoid_executing_git'
+
+RSpec.describe RuboCop::Cop::Gemspec::AvoidExecutingGit do
+ subject(:cop) { described_class.new }
+
+ it 'flags violation for executing git' do
+ expect_offense(<<~RUBY)
+ Gem::Specification.new do |gem|
+ gem.executable = `git ls-files -- bin/*`.split("\\n").map{ |f| File.basename(f) }
+ ^^^^^^^^^^^^^^^^^^^^^^^ Do not execute `git` in gemspec.
+ gem.files = `git ls-files`.split("\\n")
+ ^^^^^^^^^^^^^^ Do not execute `git` in gemspec.
+ gem.test_files = `git ls-files -- {test,spec,features}/*`.split("\\n")
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Do not execute `git` in gemspec.
+ end
+ RUBY
+ end
+
+ it 'does not flag violation for using a glob' do
+ expect_no_offenses(<<~RUBY)
+ Gem::Specification.new do |gem|
+ gem.files = Dir.glob("lib/**/*.*")
+ gem.test_files = Dir.glob("spec/**/**/*.*")
+ end
+ RUBY
+ end
+end
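
The cop's no-offense example points at the fix: enumerate files with `Dir.glob` instead of shelling out to `git`. A sketch of a compliant gemspec fragment; the glob patterns and file names are illustrative:

# Compliant: no `git` subprocess, files are resolved with plain globs.
Gem::Specification.new do |gem|
  gem.files       = Dir.glob('lib/**/*.rb') + %w[README.md LICENSE]
  gem.executables = Dir.glob('bin/*').map { |f| File.basename(f) }
  gem.test_files  = Dir.glob('spec/**/*_spec.rb')
end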
diff --git a/spec/rubocop/cop/gitlab/deprecate_track_redis_hll_event_spec.rb b/spec/rubocop/cop/gitlab/deprecate_track_redis_hll_event_spec.rb
new file mode 100644
index 00000000000..453f0c36c14
--- /dev/null
+++ b/spec/rubocop/cop/gitlab/deprecate_track_redis_hll_event_spec.rb
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+require_relative '../../../../rubocop/cop/gitlab/deprecate_track_redis_hll_event'
+
+RSpec.describe RuboCop::Cop::Gitlab::DeprecateTrackRedisHLLEvent do
+ subject(:cop) { described_class.new }
+
+ it 'does not flag the use of track_event' do
+ expect_no_offenses('track_event :show, name: "p_analytics_insights"')
+ end
+
+ it 'flags the use of track_redis_hll_event' do
+ expect_offense(<<~SOURCE)
+ track_redis_hll_event :show, name: 'p_analytics_valuestream'
+ ^^^^^^^^^^^^^^^^^^^^^ `track_redis_hll_event` is deprecated[...]
+ SOURCE
+ end
+end
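
The cop only flags `track_redis_hll_event`; the spec's no-offense case shows the replacement call. A one-line sketch of the preferred form, with an illustrative event name:

# Preferred: declare the tracking with track_event instead of the deprecated helper.
track_event :show, name: 'p_analytics_insights'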
diff --git a/spec/rubocop/cop/gitlab/mark_used_feature_flags_spec.rb b/spec/rubocop/cop/gitlab/mark_used_feature_flags_spec.rb
index 2ec3ae7aada..9ab5cdc24a4 100644
--- a/spec/rubocop/cop/gitlab/mark_used_feature_flags_spec.rb
+++ b/spec/rubocop/cop/gitlab/mark_used_feature_flags_spec.rb
@@ -217,8 +217,8 @@ RSpec.describe RuboCop::Cop::Gitlab::MarkUsedFeatureFlags do
allow(cop).to receive(:in_graphql_types?).and_return(true)
end
- include_examples 'sets flag as used', 'field :runners, Types::Ci::RunnerType.connection_type, null: true, feature_flag: :foo', 'foo'
- include_examples 'sets flag as used', 'field :runners, null: true, feature_flag: :foo', 'foo'
+ include_examples 'sets flag as used', 'field :runners, Types::Ci::RunnerType.connection_type, null: true, _deprecated_feature_flag: :foo', 'foo'
+ include_examples 'sets flag as used', 'field :runners, null: true, _deprecated_feature_flag: :foo', 'foo'
include_examples 'does not set any flags as used', 'field :solution'
include_examples 'does not set any flags as used', 'field :runners, Types::Ci::RunnerType.connection_type'
include_examples 'does not set any flags as used', 'field :runners, Types::Ci::RunnerType.connection_type, null: true, description: "hello world"'
diff --git a/spec/rubocop/cop/inject_enterprise_edition_module_spec.rb b/spec/rubocop/cop/inject_enterprise_edition_module_spec.rb
index 962efc23453..3596badc599 100644
--- a/spec/rubocop/cop/inject_enterprise_edition_module_spec.rb
+++ b/spec/rubocop/cop/inject_enterprise_edition_module_spec.rb
@@ -23,6 +23,7 @@ RSpec.describe RuboCop::Cop::InjectEnterpriseEditionModule do
end
SOURCE
end
+
it 'flags the use of `extend_mod_with` in the middle of a file' do
expect_offense(<<~SOURCE)
class Foo
diff --git a/spec/rubocop/cop_todo_spec.rb b/spec/rubocop/cop_todo_spec.rb
new file mode 100644
index 00000000000..978df2c01ee
--- /dev/null
+++ b/spec/rubocop/cop_todo_spec.rb
@@ -0,0 +1,124 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+require_relative '../../rubocop/cop_todo'
+
+RSpec.describe RuboCop::CopTodo do
+ let(:cop_name) { 'Cop/Rule' }
+
+ subject(:cop_todo) { described_class.new(cop_name) }
+
+ describe '#initialize' do
+ it 'initializes a cop todo' do
+ expect(cop_todo).to have_attributes(
+ cop_name: cop_name,
+ files: be_empty,
+ offense_count: 0,
+ previously_disabled: false
+ )
+ end
+ end
+
+ describe '#record' do
+ it 'records offenses' do
+ cop_todo.record('a.rb', 1)
+ cop_todo.record('b.rb', 2)
+
+ expect(cop_todo).to have_attributes(
+ files: contain_exactly('a.rb', 'b.rb'),
+ offense_count: 3
+ )
+ end
+ end
+
+ describe '#autocorrectable?' do
+ subject { cop_todo.autocorrectable? }
+
+ context 'when found in rubocop registry' do
+ before do
+ fake_cop = double(:cop, support_autocorrect?: autocorrectable) # rubocop:disable RSpec/VerifiedDoubles
+
+ allow(described_class).to receive(:find_cop_by_name)
+ .with(cop_name).and_return(fake_cop)
+ end
+
+ context 'when autocorrectable' do
+ let(:autocorrectable) { true }
+
+ it { is_expected.to be_truthy }
+ end
+
+ context 'when not autocorrectable' do
+ let(:autocorrectable) { false }
+
+ it { is_expected.to be_falsey }
+ end
+ end
+
+ context 'when not found in rubocop registry' do
+ before do
+ allow(described_class).to receive(:find_cop_by_name)
+ .with(cop_name).and_return(nil).and_call_original
+ end
+
+ it { is_expected.to be_falsey }
+ end
+ end
+
+ describe '#to_yaml' do
+ subject(:yaml) { cop_todo.to_yaml }
+
+ context 'when autocorrectable' do
+ before do
+ allow(cop_todo).to receive(:autocorrectable?).and_return(true)
+ end
+
+ specify do
+ expect(yaml).to eq(<<~YAML)
+ ---
+ # Cop supports --auto-correct.
+ #{cop_name}:
+ Exclude:
+ YAML
+ end
+ end
+
+ context 'when previously disabled' do
+ specify do
+ cop_todo.record('a.rb', 1)
+ cop_todo.record('b.rb', 2)
+ cop_todo.previously_disabled = true
+
+ expect(yaml).to eq(<<~YAML)
+ ---
+ #{cop_name}:
+ # Offense count: 3
+ # Temporarily disabled due to too many offenses
+ Enabled: false
+ Exclude:
+ - 'a.rb'
+ - 'b.rb'
+ YAML
+ end
+ end
+
+ context 'with multiple files' do
+ before do
+ cop_todo.record('a.rb', 0)
+ cop_todo.record('c.rb', 0)
+ cop_todo.record('b.rb', 0)
+ end
+
+ it 'sorts excludes alphabetically' do
+ expect(yaml).to eq(<<~YAML)
+ ---
+ #{cop_name}:
+ Exclude:
+ - 'a.rb'
+ - 'b.rb'
+ - 'c.rb'
+ YAML
+ end
+ end
+ end
+end
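
Taken together, the spec documents the CopTodo workflow: record offenses per file, optionally mark the cop as previously disabled, then emit a `.rubocop_todo` YAML section. A usage sketch with the API exactly as exercised above; the rendered output is described rather than reproduced:

# Record three offenses across two files, then render the todo entry.
todo = RuboCop::CopTodo.new('Cop/Rule')
todo.record('a.rb', 1)
todo.record('b.rb', 2)
todo.previously_disabled = true

puts todo.to_yaml
# => a YAML fragment in the shape asserted above: the cop name, an
#    "Offense count: 3" comment, "Enabled: false", and a sorted Exclude list.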
diff --git a/spec/rubocop/formatter/todo_formatter_spec.rb b/spec/rubocop/formatter/todo_formatter_spec.rb
index fcff028f07d..df56ee45931 100644
--- a/spec/rubocop/formatter/todo_formatter_spec.rb
+++ b/spec/rubocop/formatter/todo_formatter_spec.rb
@@ -261,16 +261,12 @@ RSpec.describe RuboCop::Formatter::TodoFormatter do
double(:offense, cop_name: cop_name)
end
- def stub_rubocop_registry(**cops)
- rubocop_registry = double(:rubocop_registry)
-
- allow(RuboCop::Cop::Registry).to receive(:global).and_return(rubocop_registry)
-
- allow(rubocop_registry).to receive(:find_by_cop_name)
- .with(String).and_return(nil)
+ def stub_rubocop_registry(cops)
+ allow(RuboCop::CopTodo).to receive(:find_cop_by_name)
+ .with(String).and_return(nil).and_call_original
cops.each do |cop_name, attributes|
- allow(rubocop_registry).to receive(:find_by_cop_name)
+ allow(RuboCop::CopTodo).to receive(:find_cop_by_name)
.with(cop_name).and_return(fake_cop(**attributes))
end
end
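
The reworked `stub_rubocop_registry` now stubs `RuboCop::CopTodo.find_cop_by_name` directly and takes a plain hash of cop names to double attributes. A usage sketch consistent with the new signature; the cop name and attribute are illustrative:

# Stub one cop as autocorrectable; unknown cop names fall through to the real lookup.
stub_rubocop_registry('Style/StringLiterals' => { support_autocorrect?: true })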
diff --git a/spec/scripts/changed-feature-flags_spec.rb b/spec/scripts/changed-feature-flags_spec.rb
index bbae49a90e4..f4058614d85 100644
--- a/spec/scripts/changed-feature-flags_spec.rb
+++ b/spec/scripts/changed-feature-flags_spec.rb
@@ -81,8 +81,8 @@ RSpec.describe 'scripts/changed-feature-flags' do
end
describe '.extracted_flags' do
- let(:file_name1) { "foo_ff_#{SecureRandom.hex(8)}"}
- let(:file_name2) { "bar_ff_#{SecureRandom.hex(8)}"}
+ let(:file_name1) { "foo_ff_#{SecureRandom.hex(8)}" }
+ let(:file_name2) { "bar_ff_#{SecureRandom.hex(8)}" }
let(:ff_dir) { FileUtils.mkdir_p(File.join(Dir.tmpdir, ff_sub_dir)) }
let(:diffs_dir) { FileUtils.mkdir_p(File.join(Dir.tmpdir, 'diffs')).first }
diff --git a/spec/scripts/lib/glfm/update_example_snapshots_spec.rb b/spec/scripts/lib/glfm/update_example_snapshots_spec.rb
index 149a384d31e..fe815aa6f1e 100644
--- a/spec/scripts/lib/glfm/update_example_snapshots_spec.rb
+++ b/spec/scripts/lib/glfm/update_example_snapshots_spec.rb
@@ -65,13 +65,19 @@ RSpec.describe Glfm::UpdateExampleSnapshots, '#process' do
## Strong
+ This example doesn't have an extension after the `example` keyword, so its
+ `source_specification` will be `commonmark`.
+
```````````````````````````````` example
__bold__
.
<p><strong>bold</strong></p>
````````````````````````````````
- ```````````````````````````````` example strong
+ This example has an extension after the `example` keyword, so its
+ `source_specification` will be `github`.
+
+ ```````````````````````````````` example some_extension_name
__bold with more text__
.
<p><strong>bold with more text</strong></p>
@@ -132,6 +138,10 @@ RSpec.describe Glfm::UpdateExampleSnapshots, '#process' do
## Strong but with HTML
+ This example has the `gitlab` keyword after the `example` keyword, so its
+ `source_specification` will be `gitlab`.
+
+
```````````````````````````````` example gitlab strong
<strong>
bold
diff --git a/spec/scripts/trigger-build_spec.rb b/spec/scripts/trigger-build_spec.rb
index 76a3cdbeaa2..d0f1d3dc41b 100644
--- a/spec/scripts/trigger-build_spec.rb
+++ b/spec/scripts/trigger-build_spec.rb
@@ -882,7 +882,6 @@ RSpec.describe Trigger do
let(:ops_gitlab_client) { double('ops_gitlab_client') }
let(:downstream_gitlab_client_endpoint) { ops_api_endpoint }
- let(:downstream_gitlab_client_token) { ops_api_token }
let(:downstream_gitlab_client) { ops_gitlab_client }
let(:ref) { 'master' }
@@ -890,7 +889,6 @@ RSpec.describe Trigger do
let(:env) do
super().merge(
- 'GITLABCOM_DATABASE_TESTING_ACCESS_TOKEN' => ops_api_token,
'GITLABCOM_DATABASE_TESTING_TRIGGER_TOKEN' => trigger_token
)
end
@@ -902,6 +900,13 @@ RSpec.describe Trigger do
private_token: com_api_token
)
.and_return(com_gitlab_client)
+
+ allow(Gitlab).to receive(:client)
+ .with(
+ endpoint: downstream_gitlab_client_endpoint
+ )
+ .and_return(downstream_gitlab_client)
+
allow(com_gitlab_client).to receive(:merge_request_notes)
.with(
env['CI_PROJECT_PATH'],
diff --git a/spec/serializers/deploy_keys/basic_deploy_key_entity_spec.rb b/spec/serializers/deploy_keys/basic_deploy_key_entity_spec.rb
index c39eb14e339..7ea72351594 100644
--- a/spec/serializers/deploy_keys/basic_deploy_key_entity_spec.rb
+++ b/spec/serializers/deploy_keys/basic_deploy_key_entity_spec.rb
@@ -6,8 +6,8 @@ RSpec.describe DeployKeys::BasicDeployKeyEntity do
include RequestAwareEntity
let(:user) { create(:user) }
- let(:project) { create(:project, :internal)}
- let(:project_private) { create(:project, :private)}
+ let(:project) { create(:project, :internal) }
+ let(:project_private) { create(:project, :private) }
let(:deploy_key) { create(:deploy_key) }
let(:options) { { user: user } }
diff --git a/spec/serializers/deploy_keys/deploy_key_entity_spec.rb b/spec/serializers/deploy_keys/deploy_key_entity_spec.rb
index e989aa8656c..7719cafae11 100644
--- a/spec/serializers/deploy_keys/deploy_key_entity_spec.rb
+++ b/spec/serializers/deploy_keys/deploy_key_entity_spec.rb
@@ -6,8 +6,8 @@ RSpec.describe DeployKeys::DeployKeyEntity do
include RequestAwareEntity
let(:user) { create(:user) }
- let(:project) { create(:project, :internal)}
- let(:project_private) { create(:project, :private)}
+ let(:project) { create(:project, :internal) }
+ let(:project_private) { create(:project, :private) }
let(:deploy_key) { create(:deploy_key) }
let(:options) { { user: user } }
diff --git a/spec/serializers/environment_serializer_spec.rb b/spec/serializers/environment_serializer_spec.rb
index 05644dad151..01d1e47b5bb 100644
--- a/spec/serializers/environment_serializer_spec.rb
+++ b/spec/serializers/environment_serializer_spec.rb
@@ -101,6 +101,37 @@ RSpec.describe EnvironmentSerializer do
expect(subject.third[:latest][:environment_type]).to be_nil
end
end
+
+ context 'when folders and standalone environments share the same name' do
+ before do
+ create(:environment, project: project, name: 'staging/my-review-1')
+ create(:environment, project: project, name: 'staging/my-review-2')
+ create(:environment, project: project, name: 'production/my-review-3')
+ create(:environment, project: project, name: 'staging')
+ create(:environment, project: project, name: 'testing')
+ end
+
+ it 'does not group standalone environments with folders that have the same name' do
+ expect(subject.count).to eq 4
+
+ expect(subject.first[:name]).to eq 'production'
+ expect(subject.first[:size]).to eq 1
+ expect(subject.first[:latest][:name]).to eq 'production/my-review-3'
+ expect(subject.first[:latest][:environment_type]).to eq 'production'
+ expect(subject.second[:name]).to eq 'staging'
+ expect(subject.second[:size]).to eq 1
+ expect(subject.second[:latest][:name]).to eq 'staging'
+ expect(subject.second[:latest][:environment_type]).to be_nil
+ expect(subject.third[:name]).to eq 'staging'
+ expect(subject.third[:size]).to eq 2
+ expect(subject.third[:latest][:name]).to eq 'staging/my-review-2'
+ expect(subject.third[:latest][:environment_type]).to eq 'staging'
+ expect(subject.fourth[:name]).to eq 'testing'
+ expect(subject.fourth[:size]).to eq 1
+ expect(subject.fourth[:latest][:name]).to eq 'testing'
+ expect(subject.fourth[:latest][:environment_type]).to be_nil
+ end
+ end
end
context 'when used with pagination' do
diff --git a/spec/serializers/group_access_token_entity_spec.rb b/spec/serializers/group_access_token_entity_spec.rb
new file mode 100644
index 00000000000..39b587c7df7
--- /dev/null
+++ b/spec/serializers/group_access_token_entity_spec.rb
@@ -0,0 +1,57 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe GroupAccessTokenEntity do
+ let_it_be(:group) { create(:group) }
+ let_it_be(:bot) { create(:user, :project_bot) }
+ let_it_be(:token) { create(:personal_access_token, user: bot) }
+
+ subject(:json) { described_class.new(token, group: group).as_json }
+
+ context 'when bot is a member of the group' do
+ before do
+ group.add_developer(bot)
+ end
+
+ it 'has the correct attributes' do
+ expected_revoke_path = Gitlab::Routing.url_helpers
+ .revoke_group_settings_access_token_path(
+ { id: token,
+ group_id: group.path })
+
+ expect(json).to(
+ include(
+ id: token.id,
+ name: token.name,
+ scopes: token.scopes,
+ user_id: token.user_id,
+ revoke_path: expected_revoke_path,
+ access_level: ::Gitlab::Access::DEVELOPER
+ ))
+
+ expect(json).not_to include(:token)
+ end
+ end
+
+ context 'when bot is unrelated to the group' do
+ it 'has the correct attributes' do
+ expected_revoke_path = Gitlab::Routing.url_helpers
+ .revoke_group_settings_access_token_path(
+ { id: token,
+ group_id: group.path })
+
+ expect(json).to(
+ include(
+ id: token.id,
+ name: token.name,
+ scopes: token.scopes,
+ user_id: token.user_id,
+ revoke_path: expected_revoke_path,
+ access_level: nil
+ ))
+
+ expect(json).not_to include(:token)
+ end
+ end
+end
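
Both entity contexts assert the same revoke path; only `access_level` changes with membership. A sketch of how that path is built, with the helper call mirroring the spec; the rendered URL shape is an assumption:

# The entity is expected to expose this helper's result as `revoke_path`.
Gitlab::Routing.url_helpers.revoke_group_settings_access_token_path(id: token, group_id: group.path)
# e.g. "/groups/<group-path>/-/settings/access_tokens/<token-id>/revoke" (assumed shape)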
diff --git a/spec/serializers/group_access_token_serializer_spec.rb b/spec/serializers/group_access_token_serializer_spec.rb
new file mode 100644
index 00000000000..3b12c3115c9
--- /dev/null
+++ b/spec/serializers/group_access_token_serializer_spec.rb
@@ -0,0 +1,28 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe GroupAccessTokenSerializer do
+ let_it_be(:group) { create(:group) }
+ let_it_be(:bot) { create(:user, :project_bot) }
+
+ subject(:serializer) { described_class.new }
+
+ before do
+ group.add_developer(bot)
+ end
+
+ describe '#represent' do
+ it 'can render a single token' do
+ token = create(:personal_access_token, user: bot)
+
+ expect(serializer.represent(token, group: group)).to be_kind_of(Hash)
+ end
+
+ it 'can render a collection of tokens' do
+ tokens = create_list(:personal_access_token, 2, user: bot)
+
+ expect(serializer.represent(tokens, group: group)).to be_kind_of(Array)
+ end
+ end
+end
diff --git a/spec/serializers/integrations/project_entity_spec.rb b/spec/serializers/integrations/project_entity_spec.rb
index 1564f7fad63..ac633d1d5c6 100644
--- a/spec/serializers/integrations/project_entity_spec.rb
+++ b/spec/serializers/integrations/project_entity_spec.rb
@@ -16,6 +16,7 @@ RSpec.describe Integrations::ProjectEntity do
it 'contains needed attributes' do
expect(subject).to include(
+ id: project.id,
avatar_url: include('uploads'),
name: project.name,
full_path: project_path(project),
diff --git a/spec/serializers/issue_entity_spec.rb b/spec/serializers/issue_entity_spec.rb
index 6b9c703c627..9335ca61b7d 100644
--- a/spec/serializers/issue_entity_spec.rb
+++ b/spec/serializers/issue_entity_spec.rb
@@ -39,6 +39,13 @@ RSpec.describe IssueEntity do
expect(subject).to include(:time_estimate, :total_time_spent, :human_time_estimate, :human_total_time_spent)
end
+ describe 'current_user' do
+ it 'has the expected permissions' do
+ expect(subject[:current_user]).to include(:can_create_note, :can_update, :can_set_issue_metadata,
+ :can_award_emoji)
+ end
+ end
+
context 'when issue got moved' do
let(:public_project) { create(:project, :public) }
let(:member) { create(:user) }
diff --git a/spec/serializers/merge_request_poll_widget_entity_spec.rb b/spec/serializers/merge_request_poll_widget_entity_spec.rb
index 409585e52f1..90a82d16e38 100644
--- a/spec/serializers/merge_request_poll_widget_entity_spec.rb
+++ b/spec/serializers/merge_request_poll_widget_entity_spec.rb
@@ -184,38 +184,8 @@ RSpec.describe MergeRequestPollWidgetEntity do
end
describe '#mergeable_discussions_state?' do
- context 'when change_response_code_merge_status is true' do
- before do
- stub_feature_flags(change_response_code_merge_status: true)
- end
-
- it 'returns mergeable discussions state' do
- expect(subject[:mergeable_discussions_state]).to eq(true)
- end
- end
-
- context 'when change_response_code_merge_status is false' do
- context 'when merge request is in a mergeable state' do
- before do
- stub_feature_flags(change_response_code_merge_status: false)
- allow(resource).to receive(:mergeable_discussions_state?).and_return(true)
- end
-
- it 'returns mergeable discussions state' do
- expect(subject[:mergeable_discussions_state]).to eq(true)
- end
- end
-
- context 'when merge request is not in a mergeable state' do
- before do
- stub_feature_flags(change_response_code_merge_status: false)
- allow(resource).to receive(:mergeable_state?).and_return(false)
- end
-
- it 'returns mergeable discussions state' do
- expect(subject[:mergeable_discussions_state]).to eq(false)
- end
- end
+ it 'returns mergeable discussions state' do
+ expect(subject[:mergeable_discussions_state]).to eq(true)
end
end
end
diff --git a/spec/serializers/merge_request_user_entity_spec.rb b/spec/serializers/merge_request_user_entity_spec.rb
index 7877356ff0f..5c7120ab6a4 100644
--- a/spec/serializers/merge_request_user_entity_spec.rb
+++ b/spec/serializers/merge_request_user_entity_spec.rb
@@ -18,8 +18,7 @@ RSpec.describe MergeRequestUserEntity do
it 'exposes needed attributes' do
is_expected.to include(
:id, :name, :username, :state, :avatar_url, :web_url,
- :can_merge, :can_update_merge_request, :reviewed, :approved,
- :attention_requested
+ :can_merge, :can_update_merge_request, :reviewed, :approved
)
end
@@ -57,14 +56,6 @@ RSpec.describe MergeRequestUserEntity do
end
end
- context 'attention_requested' do
- before do
- merge_request.find_assignee(user).update!(state: :attention_requested)
- end
-
- it { is_expected.to include(attention_requested: true ) }
- end
-
describe 'performance' do
let_it_be(:user_a) { create(:user) }
let_it_be(:user_b) { create(:user) }
diff --git a/spec/serializers/personal_access_token_entity_spec.rb b/spec/serializers/personal_access_token_entity_spec.rb
new file mode 100644
index 00000000000..8a77a4e0036
--- /dev/null
+++ b/spec/serializers/personal_access_token_entity_spec.rb
@@ -0,0 +1,27 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe PersonalAccessTokenEntity do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:token) { create(:personal_access_token, user: user) }
+
+ subject(:json) { described_class.new(token).as_json }
+
+ it 'has the correct attributes' do
+ expected_revoke_path = Gitlab::Routing.url_helpers
+ .revoke_profile_personal_access_token_path(
+ { id: token })
+
+ expect(json).to(
+ include(
+ id: token.id,
+ name: token.name,
+ scopes: token.scopes,
+ user_id: token.user_id,
+ revoke_path: expected_revoke_path
+ ))
+
+ expect(json).not_to include(:token)
+ end
+end
diff --git a/spec/serializers/personal_access_token_serializer_spec.rb b/spec/serializers/personal_access_token_serializer_spec.rb
new file mode 100644
index 00000000000..e4adc6abccb
--- /dev/null
+++ b/spec/serializers/personal_access_token_serializer_spec.rb
@@ -0,0 +1,21 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe PersonalAccessTokenSerializer do
+ subject(:serializer) { described_class.new }
+
+ describe '#represent' do
+ it 'can render a single token' do
+ token = create(:personal_access_token)
+
+ expect(serializer.represent(token)).to be_kind_of(Hash)
+ end
+
+ it 'can render a collection of tokens' do
+ tokens = create_list(:personal_access_token, 2)
+
+ expect(serializer.represent(tokens)).to be_kind_of(Array)
+ end
+ end
+end
diff --git a/spec/serializers/project_access_token_entity_spec.rb b/spec/serializers/project_access_token_entity_spec.rb
new file mode 100644
index 00000000000..616aa45e9d5
--- /dev/null
+++ b/spec/serializers/project_access_token_entity_spec.rb
@@ -0,0 +1,61 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ProjectAccessTokenEntity do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:bot) { create(:user, :project_bot) }
+ let_it_be(:token) { create(:personal_access_token, user: bot) }
+
+ subject(:json) { described_class.new(token, project: project).as_json }
+
+ context 'when bot is a member of the project' do
+ before do
+ project.add_developer(bot)
+ end
+
+ it 'has the correct attributes' do
+ expected_revoke_path = Gitlab::Routing.url_helpers
+ .revoke_namespace_project_settings_access_token_path(
+ { id: token,
+ namespace_id: project.namespace.path,
+ project_id: project.path })
+
+ expect(json).to(
+ include(
+ id: token.id,
+ name: token.name,
+ scopes: token.scopes,
+ user_id: token.user_id,
+ revoke_path: expected_revoke_path,
+ access_level: ::Gitlab::Access::DEVELOPER
+ ))
+
+ expect(json).not_to include(:token)
+ end
+ end
+
+ context 'when bot is unrelated to the project' do
+ let_it_be(:project) { create(:project) }
+
+ it 'has the correct attributes' do
+ expected_revoke_path = Gitlab::Routing.url_helpers
+ .revoke_namespace_project_settings_access_token_path(
+ { id: token,
+ namespace_id: project.namespace.path,
+ project_id: project.path })
+
+ expect(json).to(
+ include(
+ id: token.id,
+ name: token.name,
+ scopes: token.scopes,
+ user_id: token.user_id,
+ revoke_path: expected_revoke_path,
+ access_level: nil
+ ))
+
+ expect(json).not_to include(:token)
+ end
+ end
+end
diff --git a/spec/serializers/project_access_token_serializer_spec.rb b/spec/serializers/project_access_token_serializer_spec.rb
new file mode 100644
index 00000000000..1c0898d7841
--- /dev/null
+++ b/spec/serializers/project_access_token_serializer_spec.rb
@@ -0,0 +1,28 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ProjectAccessTokenSerializer do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:bot) { create(:user, :project_bot) }
+
+ subject(:serializer) { described_class.new }
+
+ before do
+ project.add_developer(bot)
+ end
+
+ describe '#represent' do
+ it 'can render a single token' do
+ token = create(:personal_access_token, user: bot)
+
+ expect(serializer.represent(token, project: project)).to be_kind_of(Hash)
+ end
+
+ it 'can render a collection of tokens' do
+ tokens = create_list(:personal_access_token, 2, user: bot)
+
+ expect(serializer.represent(tokens, project: project)).to be_kind_of(Array)
+ end
+ end
+end
diff --git a/spec/services/alert_management/process_prometheus_alert_service_spec.rb b/spec/services/alert_management/process_prometheus_alert_service_spec.rb
index 86a6cdee52d..ae52a09be48 100644
--- a/spec/services/alert_management/process_prometheus_alert_service_spec.rb
+++ b/spec/services/alert_management/process_prometheus_alert_service_spec.rb
@@ -44,6 +44,7 @@ RSpec.describe AlertManagement::ProcessPrometheusAlertService do
end
it_behaves_like 'processes new firing alert'
+ include_examples 'handles race condition in alert creation'
context 'with resolving payload' do
let(:prometheus_status) { 'resolved' }
diff --git a/spec/services/audit_events/build_service_spec.rb b/spec/services/audit_events/build_service_spec.rb
new file mode 100644
index 00000000000..caf405a53aa
--- /dev/null
+++ b/spec/services/audit_events/build_service_spec.rb
@@ -0,0 +1,154 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe AuditEvents::BuildService do
+ let(:author) { build_stubbed(:author, current_sign_in_ip: '127.0.0.1') }
+ let(:deploy_token) { build_stubbed(:deploy_token, user: author) }
+ let(:scope) { build_stubbed(:group) }
+ let(:target) { build_stubbed(:project) }
+ let(:ip_address) { '192.168.8.8' }
+ let(:message) { 'Added an interesting field from project Gotham' }
+ let(:additional_details) { { action: :custom } }
+
+ subject(:service) do
+ described_class.new(
+ author: author,
+ scope: scope,
+ target: target,
+ message: message,
+ additional_details: additional_details,
+ ip_address: ip_address
+ )
+ end
+
+ describe '#execute', :request_store do
+ subject(:event) { service.execute }
+
+ before do
+ allow(Gitlab::RequestContext.instance).to receive(:client_ip).and_return(ip_address)
+ end
+
+ it 'sets correct attributes', :aggregate_failures do
+ freeze_time do
+ expect(event).to have_attributes(
+ author_id: author.id,
+ author_name: author.name,
+ entity_id: scope.id,
+ entity_type: scope.class.name)
+
+ expect(event.details).to eq(
+ author_name: author.name,
+ author_class: author.class.name,
+ target_id: target.id,
+ target_type: target.class.name,
+ target_details: target.name,
+ custom_message: message,
+ action: :custom)
+
+ expect(event.ip_address).to be_nil
+ expect(event.created_at).to eq(DateTime.current)
+ end
+ end
+
+ context 'when IP address is not provided' do
+ let(:ip_address) { nil }
+
+ it 'uses author current_sign_in_ip' do
+ expect(event.ip_address).to be_nil
+ end
+ end
+
+ context 'when overriding target details' do
+ subject(:service) do
+ described_class.new(
+ author: author,
+ scope: scope,
+ target: target,
+ message: message,
+ target_details: "This is my target details"
+ )
+ end
+
+ it 'uses correct target details' do
+ expect(event.target_details).to eq("This is my target details")
+ end
+ end
+
+ context 'when deploy token is passed as author' do
+ let(:service) do
+ described_class.new(
+ author: deploy_token,
+ scope: scope,
+ target: target,
+ message: message
+ )
+ end
+
+ it 'expect author to be user' do
+ expect(event.author_id).to eq(-2)
+ expect(event.author_name).to eq(deploy_token.name)
+ end
+ end
+
+ context 'when deploy key is passed as author' do
+ let(:deploy_key) { build_stubbed(:deploy_key, user: author) }
+
+ let(:service) do
+ described_class.new(
+ author: deploy_key,
+ scope: scope,
+ target: target,
+ message: message
+ )
+ end
+
+ it 'expect author to be deploy key' do
+ expect(event.author_id).to eq(-3)
+ expect(event.author_name).to eq(deploy_key.name)
+ end
+ end
+
+ context 'when author is passed as UnauthenticatedAuthor' do
+ let(:service) do
+ described_class.new(
+ author: ::Gitlab::Audit::UnauthenticatedAuthor.new,
+ scope: scope,
+ target: target,
+ message: message
+ )
+ end
+
+ it 'sets author as unauthenticated user' do
+ expect(event.author).to be_an_instance_of(::Gitlab::Audit::UnauthenticatedAuthor)
+ expect(event.author_name).to eq('An unauthenticated user')
+ end
+ end
+
+ context 'when attributes are missing' do
+ context 'when author is missing' do
+ let(:author) { nil }
+
+ it { expect { service }.to raise_error(described_class::MissingAttributeError) }
+ end
+
+ context 'when scope is missing' do
+ let(:scope) { nil }
+
+ it { expect { service }.to raise_error(described_class::MissingAttributeError) }
+ end
+
+ context 'when target is missing' do
+ let(:target) { nil }
+
+ it { expect { service }.to raise_error(described_class::MissingAttributeError) }
+ end
+
+ context 'when message is missing' do
+ let(:message) { nil }
+
+ it { expect { service }.to raise_error(described_class::MissingAttributeError) }
+ end
+ end
+ end
+end
diff --git a/spec/services/auto_merge/base_service_spec.rb b/spec/services/auto_merge/base_service_spec.rb
index 3f535b83788..6c804a14620 100644
--- a/spec/services/auto_merge/base_service_spec.rb
+++ b/spec/services/auto_merge/base_service_spec.rb
@@ -254,7 +254,7 @@ RSpec.describe AutoMerge::BaseService do
subject { service.abort(merge_request, reason) }
let(:merge_request) { create(:merge_request, :merge_when_pipeline_succeeds) }
- let(:reason) { 'an error'}
+ let(:reason) { 'an error' }
it_behaves_like 'Canceled or Dropped'
diff --git a/spec/services/auto_merge_service_spec.rb b/spec/services/auto_merge_service_spec.rb
index 335c608c206..043b413acff 100644
--- a/spec/services/auto_merge_service_spec.rb
+++ b/spec/services/auto_merge_service_spec.rb
@@ -97,7 +97,7 @@ RSpec.describe AutoMergeService do
end
context 'when strategy is not present' do
- let(:strategy) { }
+ let(:strategy) {}
it 'returns nil' do
is_expected.to be_nil
@@ -140,7 +140,7 @@ RSpec.describe AutoMergeService do
end
context 'when strategy is not specified' do
- let(:strategy) { }
+ let(:strategy) {}
it 'chooses the most preferred strategy' do
is_expected.to eq(:merge_when_pipeline_succeeds)
diff --git a/spec/services/branches/create_service_spec.rb b/spec/services/branches/create_service_spec.rb
index 0d2f5838574..26cc1a0665e 100644
--- a/spec/services/branches/create_service_spec.rb
+++ b/spec/services/branches/create_service_spec.rb
@@ -2,17 +2,155 @@
require 'spec_helper'
-RSpec.describe Branches::CreateService do
+RSpec.describe Branches::CreateService, :use_clean_rails_redis_caching do
subject(:service) { described_class.new(project, user) }
let_it_be(:project) { create(:project_empty_repo) }
let_it_be(:user) { create(:user) }
+ describe '#bulk_create' do
+ subject { service.bulk_create(branches) }
+
+ let_it_be(:project) { create(:project, :custom_repo, files: { 'foo/a.txt' => 'foo' }) }
+
+ let(:branches) { { 'branch' => 'master', 'another_branch' => 'master' } }
+
+ it 'creates two branches' do
+ expect(subject[:status]).to eq(:success)
+ expect(subject[:branches].map(&:name)).to match_array(%w[branch another_branch])
+
+ expect(project.repository.branch_exists?('branch')).to be_truthy
+ expect(project.repository.branch_exists?('another_branch')).to be_truthy
+ end
+
+ context 'when branches are empty' do
+ let(:branches) { {} }
+
+ it 'is successful' do
+ expect(subject[:status]).to eq(:success)
+ expect(subject[:branches]).to eq([])
+ end
+ end
+
+ context 'when incorrect reference is provided' do
+ let(:branches) { { 'new-feature' => 'unknown' } }
+
+ before do
+ allow(project.repository).to receive(:add_branch).and_return(false)
+ end
+
+ it 'returns an error with a reference name' do
+ err_msg = 'Failed to create branch \'new-feature\': invalid reference name \'unknown\''
+
+ expect(subject[:status]).to eq(:error)
+ expect(subject[:message]).to match_array([err_msg])
+ end
+ end
+
+ context 'when branch already exists' do
+ let(:branches) { { 'master' => 'master' } }
+
+ it 'returns an error' do
+ expect(subject[:status]).to eq(:error)
+ expect(subject[:message]).to match_array(['Branch already exists'])
+ end
+ end
+
+ context 'when an ambiguous branch name is provided' do
+ let(:branches) { { 'ambiguous/test' => 'master', 'ambiguous' => 'master' } }
+
+ it 'returns an error that the branch could not be created' do
+ err_msg = 'Failed to create branch \'ambiguous\': 13:reference is ambiguous.'
+
+ expect(subject[:status]).to eq(:error)
+ expect(subject[:message]).to match_array([err_msg])
+ end
+ end
+
+ context 'when PreReceiveError exception' do
+ let(:branches) { { 'error' => 'master' } }
+
+ it 'logs and returns an error if there is a PreReceiveError exception' do
+ error_message = 'pre receive error'
+ raw_message = "GitLab: #{error_message}"
+ pre_receive_error = Gitlab::Git::PreReceiveError.new(raw_message)
+
+ allow(project.repository).to receive(:add_branch).and_raise(pre_receive_error)
+
+ expect(Gitlab::ErrorTracking).to receive(:log_exception).with(
+ pre_receive_error,
+ pre_receive_message: raw_message,
+ branch_name: 'error',
+ ref: 'master'
+ )
+
+ expect(subject[:status]).to eq(:error)
+ expect(subject[:message]).to match_array([error_message])
+ end
+ end
+
+ context 'when multiple errors occur' do
+ let(:branches) { { 'master' => 'master', '' => 'master', 'failed_branch' => 'master' } }
+
+ it 'returns all errors' do
+ allow(project.repository).to receive(:add_branch).with(
+ user,
+ 'failed_branch',
+ 'master',
+ expire_cache: false
+ ).and_return(false)
+
+ expect(subject[:status]).to eq(:error)
+ expect(subject[:message]).to match_array(
+ [
+ 'Branch already exists',
+ 'Branch name is invalid',
+ "Failed to create branch 'failed_branch': invalid reference name 'master'"
+ ]
+ )
+ end
+ end
+
+ context 'without N+1 for Redis cache' do
+ let(:branches) { { 'branch1' => 'master', 'branch2' => 'master', 'branch3' => 'master' } }
+
+ it 'rebuilds the branch names cache in Redis only once' do
+ project.repository.expire_branches_cache
+
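+ # Record Redis traffic while creating the branches; a single SADD shows the branch_names cache is rebuilt once rather than once per branch.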
+ control = RedisCommands::Recorder.new(pattern: ':branch_names:') { subject }
+
+ expect(control.by_command(:sadd).count).to eq(1)
+ end
+ end
+
+ context 'without N+1 branch cache expiration' do
+ let(:branches) { { 'branch_1' => 'master', 'branch_2' => 'master', 'branch_3' => 'master' } }
+
+ it 'triggers branch cache expiration only once' do
+ expect(project.repository).to receive(:expire_branches_cache).once
+
+ subject
+ end
+
+ context 'when branches were not added' do
+ let(:branches) { { 'master' => 'master' } }
+
+ it 'does not trigger branch cache expiration' do
+ expect(project.repository).not_to receive(:expire_branches_cache)
+
+ subject
+ end
+ end
+ end
+ end
+
describe '#execute' do
context 'when repository is empty' do
it 'creates master branch' do
- service.execute('my-feature', 'master')
+ result = service.execute('my-feature', 'master')
+ expect(result[:status]).to eq(:success)
+ expect(result[:branch].name).to eq('my-feature')
expect(project.repository.branch_exists?('master')).to be_truthy
end
diff --git a/spec/services/bulk_imports/create_service_spec.rb b/spec/services/bulk_imports/create_service_spec.rb
index 67ec6fee1ae..4b655dd5d6d 100644
--- a/spec/services/bulk_imports/create_service_spec.rb
+++ b/spec/services/bulk_imports/create_service_spec.rb
@@ -10,19 +10,19 @@ RSpec.describe BulkImports::CreateService do
{
source_type: 'group_entity',
source_full_path: 'full/path/to/group1',
- destination_name: 'destination group 1',
+ destination_slug: 'destination group 1',
destination_namespace: 'full/path/to/destination1'
},
{
source_type: 'group_entity',
source_full_path: 'full/path/to/group2',
- destination_name: 'destination group 2',
+ destination_slug: 'destination group 2',
destination_namespace: 'full/path/to/destination2'
},
{
source_type: 'project_entity',
source_full_path: 'full/path/to/project1',
- destination_name: 'destination project 1',
+ destination_slug: 'destination project 1',
destination_namespace: 'full/path/to/destination1'
}
]
diff --git a/spec/services/bulk_imports/file_download_service_spec.rb b/spec/services/bulk_imports/file_download_service_spec.rb
index bd664d6e996..81229cc8431 100644
--- a/spec/services/bulk_imports/file_download_service_spec.rb
+++ b/spec/services/bulk_imports/file_download_service_spec.rb
@@ -136,14 +136,45 @@ RSpec.describe BulkImports::FileDownloadService do
end
context 'when chunk code is not 200' do
- let(:chunk_double) { double('chunk', size: 1000, code: 307) }
+ let(:chunk_double) { double('chunk', size: 1000, code: 500) }
it 'raises an error' do
expect { subject.execute }.to raise_error(
described_class::ServiceError,
- 'File download error 307'
+ 'File download error 500'
)
end
+
+ context 'when chunk code is redirection' do
+ let(:chunk_double) { double('redirection', size: 1000, code: 303) }
+
+ it 'does not write a redirection chunk' do
+ expect { subject.execute }.not_to raise_error
+
+ expect(File.read(filepath)).not_to include('redirection')
+ end
+
+ context 'when redirection chunk appears at a later stage of the download' do
+ it 'raises an error' do
+ another_chunk_double = double('another redirection', size: 1000, code: 303)
+ data_chunk = double('data chunk', size: 1000, code: 200)
+
+ allow_next_instance_of(BulkImports::Clients::HTTP) do |client|
+ allow(client).to receive(:head).and_return(response_double)
+ allow(client)
+ .to receive(:stream)
+ .and_yield(chunk_double)
+ .and_yield(data_chunk)
+ .and_yield(another_chunk_double)
+ end
+
+ expect { subject.execute }.to raise_error(
+ described_class::ServiceError,
+ 'File download error 303'
+ )
+ end
+ end
+ end
end
context 'when file is a symlink' do
diff --git a/spec/services/bulk_update_integration_service_spec.rb b/spec/services/bulk_update_integration_service_spec.rb
index e3e38aacaa2..7c5bd1db565 100644
--- a/spec/services/bulk_update_integration_service_spec.rb
+++ b/spec/services/bulk_update_integration_service_spec.rb
@@ -71,7 +71,7 @@ RSpec.describe BulkUpdateIntegrationService do
context 'with integration with data fields' do
let(:excluded_attributes) do
- %w[id service_id created_at updated_at encrypted_properties encrypted_properties_iv]
+ %w[id integration_id created_at updated_at encrypted_properties encrypted_properties_iv]
end
it 'updates the data fields from the integration', :aggregate_failures do
diff --git a/spec/services/ci/create_pipeline_service/evaluate_runner_tags_spec.rb b/spec/services/ci/create_pipeline_service/evaluate_runner_tags_spec.rb
index 9add096d782..7c698242921 100644
--- a/spec/services/ci/create_pipeline_service/evaluate_runner_tags_spec.rb
+++ b/spec/services/ci/create_pipeline_service/evaluate_runner_tags_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
RSpec.describe Ci::CreatePipelineService do
let_it_be(:group) { create(:group, :private) }
- let_it_be(:group_variable) { create(:ci_group_variable, group: group, key: 'RUNNER_TAG', value: 'group')}
+ let_it_be(:group_variable) { create(:ci_group_variable, group: group, key: 'RUNNER_TAG', value: 'group') }
let_it_be(:project) { create(:project, :repository, group: group) }
let_it_be(:user) { create(:user) }
diff --git a/spec/services/ci/create_pipeline_service/parent_child_pipeline_spec.rb b/spec/services/ci/create_pipeline_service/parent_child_pipeline_spec.rb
index 4326fa5533f..cc808b7e61c 100644
--- a/spec/services/ci/create_pipeline_service/parent_child_pipeline_spec.rb
+++ b/spec/services/ci/create_pipeline_service/parent_child_pipeline_spec.rb
@@ -36,7 +36,7 @@ RSpec.describe Ci::CreatePipelineService, '#execute' do
expect(pipeline.statuses).to match_array [test, bridge]
expect(bridge.options).to eq(expected_bridge_options)
expect(bridge.yaml_variables)
- .to include(key: 'CROSS', value: 'downstream', public: true)
+ .to include(key: 'CROSS', value: 'downstream')
end
end
diff --git a/spec/services/ci/create_pipeline_service/rules_spec.rb b/spec/services/ci/create_pipeline_service/rules_spec.rb
index d0ce1c5aba8..6e48141226d 100644
--- a/spec/services/ci/create_pipeline_service/rules_spec.rb
+++ b/spec/services/ci/create_pipeline_service/rules_spec.rb
@@ -7,10 +7,38 @@ RSpec.describe Ci::CreatePipelineService do
let(:ref) { 'refs/heads/master' }
let(:source) { :push }
let(:service) { described_class.new(project, user, { ref: ref }) }
- let(:pipeline) { service.execute(source).payload }
+ let(:response) { execute_service }
+ let(:pipeline) { response.payload }
let(:build_names) { pipeline.builds.pluck(:name) }
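+ # Helper to run the service with push-style params; callers can override `before:` (e.g. nil in the compare_to examples) and pass a block to adjust the pipeline before creation.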
+ def execute_service(before: '00000000', variables_attributes: nil)
+ params = { ref: ref, before: before, after: project.commit(ref).sha, variables_attributes: variables_attributes }
+
+ described_class
+ .new(project, user, params)
+ .execute(source) do |pipeline|
+ yield(pipeline) if block_given?
+ end
+ end
+
context 'job:rules' do
+ let(:regular_job) { find_job('regular-job') }
+ let(:rules_job) { find_job('rules-job') }
+ let(:delayed_job) { find_job('delayed-job') }
+
+ def find_job(name)
+ pipeline.builds.find_by(name: name)
+ end
+
+ shared_examples 'rules jobs are excluded' do
+ it 'only persists the job without rules' do
+ expect(pipeline).to be_persisted
+ expect(regular_job).to be_persisted
+ expect(rules_job).to be_nil
+ expect(delayed_job).to be_nil
+ end
+ end
+
before do
stub_ci_pipeline_yaml_file(config)
allow_next_instance_of(Ci::BuildScheduleWorker) do |instance|
@@ -95,10 +123,6 @@ RSpec.describe Ci::CreatePipelineService do
end
context 'with allow_failure and exit_codes', :aggregate_failures do
- def find_job(name)
- pipeline.builds.find_by(name: name)
- end
-
let(:config) do
<<-EOY
job-1:
@@ -280,6 +304,773 @@ RSpec.describe Ci::CreatePipelineService do
end
end
end
+
+ context 'with simple if: clauses' do
+ let(:config) do
+ <<-EOY
+ regular-job:
+ script: 'echo Hello, World!'
+
+ master-job:
+ script: "echo hello world, $CI_COMMIT_REF_NAME"
+ rules:
+ - if: $CI_COMMIT_REF_NAME == "nonexistant-branch"
+ when: never
+ - if: $CI_COMMIT_REF_NAME =~ /master/
+ when: manual
+
+ negligible-job:
+ script: "exit 1"
+ rules:
+ - if: $CI_COMMIT_REF_NAME =~ /master/
+ allow_failure: true
+
+ delayed-job:
+ script: "echo See you later, World!"
+ rules:
+ - if: $CI_COMMIT_REF_NAME =~ /master/
+ when: delayed
+ start_in: 1 hour
+
+ never-job:
+ script: "echo Goodbye, World!"
+ rules:
+ - if: $CI_COMMIT_REF_NAME
+ when: never
+ EOY
+ end
+
+ context 'with matches' do
+ it 'creates a pipeline with the vanilla and manual jobs' do
+ expect(pipeline).to be_persisted
+ expect(build_names).to contain_exactly(
+ 'regular-job', 'delayed-job', 'master-job', 'negligible-job'
+ )
+ end
+
+ it 'assigns job:when values to the builds' do
+ expect(find_job('regular-job').when).to eq('on_success')
+ expect(find_job('master-job').when).to eq('manual')
+ expect(find_job('negligible-job').when).to eq('on_success')
+ expect(find_job('delayed-job').when).to eq('delayed')
+ end
+
+ it 'assigns job:allow_failure values to the builds' do
+ expect(find_job('regular-job').allow_failure).to eq(false)
+ expect(find_job('master-job').allow_failure).to eq(false)
+ expect(find_job('negligible-job').allow_failure).to eq(true)
+ expect(find_job('delayed-job').allow_failure).to eq(false)
+ end
+
+ it 'assigns start_in for delayed jobs' do
+ expect(delayed_job.options[:start_in]).to eq('1 hour')
+ end
+ end
+
+ context 'with no matches' do
+ let(:ref) { 'refs/heads/feature' }
+
+ it_behaves_like 'rules jobs are excluded'
+ end
+ end
+
+ context 'with complex if: clauses' do
+ let(:config) do
+ <<-EOY
+ regular-job:
+ script: 'echo Hello, World!'
+ rules:
+ - if: $VAR == 'present' && $OTHER || $CI_COMMIT_REF_NAME
+ when: manual
+ allow_failure: true
+ EOY
+ end
+
+ it 'matches the first rule' do
+ expect(pipeline).to be_persisted
+ expect(build_names).to contain_exactly('regular-job')
+ expect(regular_job.when).to eq('manual')
+ expect(regular_job.allow_failure).to eq(true)
+ end
+ end
+ end
+
+ context 'changes:' do
+ let(:config) do
+ <<-EOY
+ regular-job:
+ script: 'echo Hello, World!'
+
+ rules-job:
+ script: "echo hello world, $CI_COMMIT_REF_NAME"
+ rules:
+ - changes:
+ - README.md
+ when: manual
+ - changes:
+ - app.rb
+ when: on_success
+
+ delayed-job:
+ script: "echo See you later, World!"
+ rules:
+ - changes:
+ - README.md
+ when: delayed
+ start_in: 4 hours
+
+ negligible-job:
+ script: "can be failed sometimes"
+ rules:
+ - changes:
+ - README.md
+ allow_failure: true
+
+ README:
+ script: "I use variables for changes!"
+ rules:
+ - changes:
+ - $CI_JOB_NAME*
+
+ changes-paths:
+ script: "I am using a new syntax!"
+ rules:
+ - changes:
+ paths: [README.md]
+ EOY
+ end
+
+ context 'and matches' do
+ before do
+ allow_next_instance_of(Ci::Pipeline) do |pipeline|
+ allow(pipeline).to receive(:modified_paths).and_return(%w[README.md])
+ end
+ end
+
+ it 'creates six jobs' do
+ expect(pipeline).to be_persisted
+ expect(build_names).to contain_exactly(
+ 'regular-job', 'rules-job', 'delayed-job', 'negligible-job', 'README', 'changes-paths'
+ )
+ end
+
+ it 'sets when: for all jobs' do
+ expect(regular_job.when).to eq('on_success')
+ expect(rules_job.when).to eq('manual')
+ expect(delayed_job.when).to eq('delayed')
+ expect(delayed_job.options[:start_in]).to eq('4 hours')
+ end
+
+ it 'sets allow_failure: for negligible job' do
+ expect(find_job('negligible-job').allow_failure).to eq(true)
+ end
+ end
+
+ context 'and matches the second rule' do
+ before do
+ allow_next_instance_of(Ci::Pipeline) do |pipeline|
+ allow(pipeline).to receive(:modified_paths).and_return(%w[app.rb])
+ end
+ end
+
+ it 'includes both jobs' do
+ expect(pipeline).to be_persisted
+ expect(build_names).to contain_exactly('regular-job', 'rules-job')
+ end
+
+ it 'sets when: for the created rules job based on the second clause' do
+ expect(regular_job.when).to eq('on_success')
+ expect(rules_job.when).to eq('on_success')
+ end
+ end
+
+ context 'and does not match' do
+ before do
+ allow_next_instance_of(Ci::Pipeline) do |pipeline|
+ allow(pipeline).to receive(:modified_paths).and_return(%w[useless_script.rb])
+ end
+ end
+
+ it_behaves_like 'rules jobs are excluded'
+
+ it 'sets when: for the created job' do
+ expect(regular_job.when).to eq('on_success')
+ end
+ end
+
+ context 'with paths and compare_to' do
+ let_it_be(:project) { create(:project, :empty_repo) }
+ let_it_be(:user) { project.first_owner }
+
+ before_all do
+ project.repository.add_branch(user, 'feature_1', 'master')
+
+ project.repository.create_file(
+ user, 'file1.txt', 'file 1', message: 'Create file1.txt', branch_name: 'feature_1'
+ )
+
+ project.repository.add_branch(user, 'feature_2', 'feature_1')
+
+ project.repository.create_file(
+ user, 'file2.txt', 'file 2', message: 'Create file2.txt', branch_name: 'feature_2'
+ )
+ end
+
+ let(:changed_file) { 'file2.txt' }
+ let(:ref) { 'feature_2' }
+
+ let(:response) { execute_service(before: nil) }
+
+ context 'for jobs rules' do
+ let(:config) do
+ <<-EOY
+ job1:
+ script: exit 0
+ rules:
+ - changes:
+ paths: [#{changed_file}]
+ compare_to: #{compare_to}
+
+ job2:
+ script: exit 0
+ EOY
+ end
+
+ context 'when there is no such compare_to ref' do
+ let(:compare_to) { 'invalid-branch' }
+
+ it 'returns an error' do
+ expect(pipeline.errors.full_messages).to eq([
+ 'Failed to parse rule for job1: rules:changes:compare_to is not a valid ref'
+ ])
+ end
+
+ context 'when the FF ci_rules_changes_compare is not enabled' do
+ before do
+ stub_feature_flags(ci_rules_changes_compare: false)
+ end
+
+ it 'ignores compare_to so the changes rule always matches' do
+ expect(build_names).to contain_exactly('job1', 'job2')
+ end
+ end
+ end
+
+ context 'when the compare_to ref exists' do
+ let(:compare_to) { 'feature_1' }
+
+ context 'when the rule matches' do
+ it 'creates job1 and job2' do
+ expect(build_names).to contain_exactly('job1', 'job2')
+ end
+
+ context 'when the FF ci_rules_changes_compare is not enabled' do
+ before do
+ stub_feature_flags(ci_rules_changes_compare: false)
+ end
+
+ it 'ignores compare_to so the changes rule always matches' do
+ expect(build_names).to contain_exactly('job1', 'job2')
+ end
+ end
+ end
+
+ context 'when the rule does not match' do
+ let(:changed_file) { 'file1.txt' }
+
+ it 'does not create job1' do
+ expect(build_names).to contain_exactly('job2')
+ end
+
+ context 'when the FF ci_rules_changes_compare is not enabled' do
+ before do
+ stub_feature_flags(ci_rules_changes_compare: false)
+ end
+
+ it 'ignores compare_to so the changes rule always matches' do
+ expect(build_names).to contain_exactly('job1', 'job2')
+ end
+ end
+ end
+ end
+ end
+
+ context 'for workflow rules' do
+ let(:config) do
+ <<-EOY
+ workflow:
+ rules:
+ - changes:
+ paths: [#{changed_file}]
+ compare_to: #{compare_to}
+
+ job1:
+ script: exit 0
+ EOY
+ end
+
+ let(:compare_to) { 'feature_1' }
+
+ context 'when the rule matches' do
+ it 'creates job1' do
+ expect(pipeline).to be_created_successfully
+ expect(build_names).to contain_exactly('job1')
+ end
+
+ context 'when the FF ci_rules_changes_compare is not enabled' do
+ before do
+ stub_feature_flags(ci_rules_changes_compare: false)
+ end
+
+ it 'ignores compare_to so the changes rule always matches' do
+ expect(pipeline).to be_created_successfully
+ expect(build_names).to contain_exactly('job1')
+ end
+ end
+ end
+
+ context 'when the rule does not match' do
+ let(:changed_file) { 'file1.txt' }
+
+ it 'does not create job1' do
+ expect(pipeline).not_to be_created_successfully
+ expect(build_names).to be_empty
+ end
+ end
+ end
+ end
+ end
+
+ context 'mixed if: and changes: rules' do
+ let(:config) do
+ <<-EOY
+ regular-job:
+ script: 'echo Hello, World!'
+
+ rules-job:
+ script: "echo hello world, $CI_COMMIT_REF_NAME"
+ allow_failure: true
+ rules:
+ - changes:
+ - README.md
+ when: manual
+ - if: $CI_COMMIT_REF_NAME == "master"
+ when: on_success
+ allow_failure: false
+
+ delayed-job:
+ script: "echo See you later, World!"
+ rules:
+ - changes:
+ - README.md
+ when: delayed
+ start_in: 4 hours
+ allow_failure: true
+ - if: $CI_COMMIT_REF_NAME == "master"
+ when: delayed
+ start_in: 1 hour
+ EOY
+ end
+
+ context 'and changes: matches before if' do
+ before do
+ allow_next_instance_of(Ci::Pipeline) do |pipeline|
+ allow(pipeline).to receive(:modified_paths).and_return(%w[README.md])
+ end
+ end
+
+ it 'creates three jobs' do
+ expect(pipeline).to be_persisted
+ expect(build_names)
+ .to contain_exactly('regular-job', 'rules-job', 'delayed-job')
+ end
+
+ it 'sets when: for all jobs' do
+ expect(regular_job.when).to eq('on_success')
+ expect(rules_job.when).to eq('manual')
+ expect(delayed_job.when).to eq('delayed')
+ expect(delayed_job.options[:start_in]).to eq('4 hours')
+ end
+
+ it 'sets allow_failure: for all jobs' do
+ expect(regular_job.allow_failure).to eq(false)
+ expect(rules_job.allow_failure).to eq(true)
+ expect(delayed_job.allow_failure).to eq(true)
+ end
+ end
+
+ context 'and if: matches after changes' do
+ it 'includes both jobs' do
+ expect(pipeline).to be_persisted
+ expect(build_names).to contain_exactly('regular-job', 'rules-job', 'delayed-job')
+ end
+
+ it 'sets when: for the created rules job based on the second clause' do
+ expect(regular_job.when).to eq('on_success')
+ expect(rules_job.when).to eq('on_success')
+ expect(delayed_job.when).to eq('delayed')
+ expect(delayed_job.options[:start_in]).to eq('1 hour')
+ end
+ end
+
+ context 'and does not match' do
+ let(:ref) { 'refs/heads/wip' }
+
+ it_behaves_like 'rules jobs are excluded'
+
+ it 'sets when: for the created job' do
+ expect(regular_job.when).to eq('on_success')
+ end
+ end
+ end
+
+ context 'mixed if: and changes: clauses' do
+ let(:config) do
+ <<-EOY
+ regular-job:
+ script: 'echo Hello, World!'
+
+ rules-job:
+ script: "echo hello world, $CI_COMMIT_REF_NAME"
+ rules:
+ - if: $CI_COMMIT_REF_NAME =~ /master/
+ changes: [README.md]
+ when: on_success
+ allow_failure: true
+ - if: $CI_COMMIT_REF_NAME =~ /master/
+ changes: [app.rb]
+ when: manual
+ EOY
+ end
+
+ context 'with if matches and changes matches' do
+ before do
+ allow_next_instance_of(Ci::Pipeline) do |pipeline|
+ allow(pipeline).to receive(:modified_paths).and_return(%w[app.rb])
+ end
+ end
+
+ it 'persists all jobs' do
+ expect(pipeline).to be_persisted
+ expect(regular_job).to be_persisted
+ expect(rules_job).to be_persisted
+ expect(rules_job.when).to eq('manual')
+ expect(rules_job.allow_failure).to eq(false)
+ end
+ end
+
+ context 'with if matches and no change matches' do
+ it_behaves_like 'rules jobs are excluded'
+ end
+
+ context 'with change matches and no if matches' do
+ let(:ref) { 'refs/heads/feature' }
+
+ before do
+ allow_next_instance_of(Ci::Pipeline) do |pipeline|
+ allow(pipeline).to receive(:modified_paths).and_return(%w[README.md])
+ end
+ end
+
+ it_behaves_like 'rules jobs are excluded'
+ end
+
+ context 'and no matches' do
+ let(:ref) { 'refs/heads/feature' }
+
+ it_behaves_like 'rules jobs are excluded'
+ end
+ end
+
+ context 'complex if: allow_failure usages' do
+ let(:config) do
+ <<-EOY
+ job-1:
+ script: "exit 1"
+ allow_failure: true
+ rules:
+ - if: $CI_COMMIT_REF_NAME =~ /master/
+ allow_failure: false
+
+ job-2:
+ script: "exit 1"
+ allow_failure: true
+ rules:
+ - if: $CI_COMMIT_REF_NAME =~ /nonexistant-branch/
+ allow_failure: false
+
+ job-3:
+ script: "exit 1"
+ rules:
+ - if: $CI_COMMIT_REF_NAME =~ /nonexistant-branch/
+ allow_failure: true
+
+ job-4:
+ script: "exit 1"
+ rules:
+ - if: $CI_COMMIT_REF_NAME =~ /master/
+ allow_failure: false
+
+ job-5:
+ script: "exit 1"
+ allow_failure: false
+ rules:
+ - if: $CI_COMMIT_REF_NAME =~ /master/
+ allow_failure: true
+
+ job-6:
+ script: "exit 1"
+ rules:
+ - if: $CI_COMMIT_REF_NAME =~ /nonexistant-branch/
+ allow_failure: false
+ - allow_failure: true
+ EOY
+ end
+
+ it 'creates a pipeline' do
+ expect(pipeline).to be_persisted
+ expect(build_names).to contain_exactly('job-1', 'job-4', 'job-5', 'job-6')
+ end
+
+ it 'assigns job:allow_failure values to the builds' do
+ expect(find_job('job-1').allow_failure).to eq(false)
+ expect(find_job('job-4').allow_failure).to eq(false)
+ expect(find_job('job-5').allow_failure).to eq(true)
+ expect(find_job('job-6').allow_failure).to eq(true)
+ end
+ end
+
+ context 'complex if: allow_failure & when usages' do
+ let(:config) do
+ <<-EOY
+ job-1:
+ script: "exit 1"
+ rules:
+ - if: $CI_COMMIT_REF_NAME =~ /master/
+ when: manual
+
+ job-2:
+ script: "exit 1"
+ rules:
+ - if: $CI_COMMIT_REF_NAME =~ /master/
+ when: manual
+ allow_failure: true
+
+ job-3:
+ script: "exit 1"
+ allow_failure: true
+ rules:
+ - if: $CI_COMMIT_REF_NAME =~ /master/
+ when: manual
+
+ job-4:
+ script: "exit 1"
+ allow_failure: true
+ rules:
+ - if: $CI_COMMIT_REF_NAME =~ /master/
+ when: manual
+ allow_failure: false
+
+ job-5:
+ script: "exit 1"
+ rules:
+ - if: $CI_COMMIT_REF_NAME =~ /nonexistant-branch/
+ when: manual
+ allow_failure: false
+ - when: always
+ allow_failure: true
+
+ job-6:
+ script: "exit 1"
+ allow_failure: false
+ rules:
+ - if: $CI_COMMIT_REF_NAME =~ /master/
+ when: manual
+
+ job-7:
+ script: "exit 1"
+ allow_failure: false
+ rules:
+ - if: $CI_COMMIT_REF_NAME =~ /nonexistant-branch/
+ when: manual
+ - when: :on_failure
+ allow_failure: true
+ EOY
+ end
+
+ it 'creates a pipeline' do
+ expect(pipeline).to be_persisted
+ expect(build_names).to contain_exactly(
+ 'job-1', 'job-2', 'job-3', 'job-4', 'job-5', 'job-6', 'job-7'
+ )
+ end
+
+ it 'assigns job:allow_failure values to the builds' do
+ expect(find_job('job-1').allow_failure).to eq(false)
+ expect(find_job('job-2').allow_failure).to eq(true)
+ expect(find_job('job-3').allow_failure).to eq(true)
+ expect(find_job('job-4').allow_failure).to eq(false)
+ expect(find_job('job-5').allow_failure).to eq(true)
+ expect(find_job('job-6').allow_failure).to eq(false)
+ expect(find_job('job-7').allow_failure).to eq(true)
+ end
+
+ it 'assigns job:when values to the builds' do
+ expect(find_job('job-1').when).to eq('manual')
+ expect(find_job('job-2').when).to eq('manual')
+ expect(find_job('job-3').when).to eq('manual')
+ expect(find_job('job-4').when).to eq('manual')
+ expect(find_job('job-5').when).to eq('always')
+ expect(find_job('job-6').when).to eq('manual')
+ expect(find_job('job-7').when).to eq('on_failure')
+ end
+ end
+
+ context 'deploy freeze period `if:` clause' do
+ # '0 23 * * 5' == "At 23:00 on Friday", '0 7 * * 1' == "At 07:00 on Monday"
+ let!(:freeze_period) { create(:ci_freeze_period, project: project, freeze_start: '0 23 * * 5', freeze_end: '0 7 * * 1') }
+
+ context 'with 2 jobs' do
+ let(:config) do
+ <<-EOY
+ stages:
+ - test
+ - deploy
+
+ test-job:
+ script:
+ - echo 'running TEST stage'
+
+ deploy-job:
+ stage: deploy
+ script:
+ - echo 'running DEPLOY stage'
+ rules:
+ - if: $CI_DEPLOY_FREEZE == null
+ EOY
+ end
+
+ context 'when outside freeze period' do
+ it 'creates two jobs' do
+ Timecop.freeze(2020, 4, 10, 22, 59) do
+ expect(pipeline).to be_persisted
+ expect(build_names).to contain_exactly('test-job', 'deploy-job')
+ end
+ end
+ end
+
+ context 'when inside freeze period' do
+ it 'creates one job' do
+ Timecop.freeze(2020, 4, 10, 23, 1) do
+ expect(pipeline).to be_persisted
+ expect(build_names).to contain_exactly('test-job')
+ end
+ end
+ end
+ end
+
+ context 'with 1 job' do
+ let(:config) do
+ <<-EOY
+ stages:
+ - deploy
+
+ deploy-job:
+ stage: deploy
+ script:
+ - echo 'running DEPLOY stage'
+ rules:
+ - if: $CI_DEPLOY_FREEZE == null
+ EOY
+ end
+
+ context 'when outside freeze period' do
+ it 'creates one job' do
+ Timecop.freeze(2020, 4, 10, 22, 59) do
+ expect(pipeline).to be_persisted
+ expect(build_names).to contain_exactly('deploy-job')
+ end
+ end
+ end
+
+ context 'when inside freeze period' do
+ it 'does not create the pipeline', :aggregate_failures do
+ Timecop.freeze(2020, 4, 10, 23, 1) do
+ expect(response).to be_error
+ expect(pipeline).not_to be_persisted
+ end
+ end
+ end
+ end
+ end
+
+ context 'with when:manual' do
+ let(:config) do
+ <<-EOY
+ job-with-rules:
+ script: 'echo hey'
+ rules:
+ - if: $CI_COMMIT_REF_NAME =~ /master/
+
+ job-when-with-rules:
+ script: 'echo hey'
+ when: manual
+ rules:
+ - if: $CI_COMMIT_REF_NAME =~ /master/
+
+ job-when-with-rules-when:
+ script: 'echo hey'
+ when: manual
+ rules:
+ - if: $CI_COMMIT_REF_NAME =~ /master/
+ when: on_success
+
+ job-with-rules-when:
+ script: 'echo hey'
+ rules:
+ - if: $CI_COMMIT_REF_NAME =~ /master/
+ when: manual
+
+ job-without-rules:
+ script: 'echo this is a job with NO rules'
+ EOY
+ end
+
+ let(:job_with_rules) { find_job('job-with-rules') }
+ let(:job_when_with_rules) { find_job('job-when-with-rules') }
+ let(:job_when_with_rules_when) { find_job('job-when-with-rules-when') }
+ let(:job_with_rules_when) { find_job('job-with-rules-when') }
+ let(:job_without_rules) { find_job('job-without-rules') }
+
+ context 'when matching the rules' do
+ let(:ref) { 'refs/heads/master' }
+
+ it 'adds the job-with-rules with a when:manual' do
+ expect(job_with_rules).to be_persisted
+ expect(job_when_with_rules).to be_persisted
+ expect(job_when_with_rules_when).to be_persisted
+ expect(job_with_rules_when).to be_persisted
+ expect(job_without_rules).to be_persisted
+
+ expect(job_with_rules.when).to eq('on_success')
+ expect(job_when_with_rules.when).to eq('manual')
+ expect(job_when_with_rules_when.when).to eq('on_success')
+ expect(job_with_rules_when.when).to eq('manual')
+ expect(job_without_rules.when).to eq('on_success')
+ end
+ end
+
+ context 'when there is no match to the rule' do
+ let(:ref) { 'refs/heads/wip' }
+
+ it 'does not add job_with_rules' do
+ expect(job_with_rules).to be_nil
+ expect(job_when_with_rules).to be_nil
+ expect(job_when_with_rules_when).to be_nil
+ expect(job_with_rules_when).to be_nil
+ expect(job_without_rules).to be_persisted
+ end
+ end
end
end
@@ -447,5 +1238,232 @@ RSpec.describe Ci::CreatePipelineService do
end
end
end
+
+ context 'with persisted variables' do
+ let(:config) do
+ <<-EOY
+ workflow:
+ rules:
+ - if: $CI_COMMIT_REF_NAME == "master"
+
+ regular-job:
+ script: 'echo Hello, World!'
+ EOY
+ end
+
+ context 'with matches' do
+ it 'creates a pipeline' do
+ expect(pipeline).to be_persisted
+ expect(build_names).to contain_exactly('regular-job')
+ end
+ end
+
+ context 'with no matches' do
+ let(:ref) { 'refs/heads/feature' }
+
+ it 'does not create a pipeline', :aggregate_failures do
+ expect(response).to be_error
+ expect(pipeline).not_to be_persisted
+ end
+ end
+ end
+
+ context 'with pipeline variables' do
+ let(:pipeline) do
+ execute_service(variables_attributes: variables_attributes).payload
+ end
+
+ let(:config) do
+ <<-EOY
+ workflow:
+ rules:
+ - if: $SOME_VARIABLE
+
+ regular-job:
+ script: 'echo Hello, World!'
+ EOY
+ end
+
+ context 'with matches' do
+ let(:variables_attributes) do
+ [{ key: 'SOME_VARIABLE', secret_value: 'SOME_VAR' }]
+ end
+
+ it 'creates a pipeline' do
+ expect(pipeline).to be_persisted
+ expect(build_names).to contain_exactly('regular-job')
+ end
+ end
+
+ context 'with no matches' do
+ let(:variables_attributes) { {} }
+
+ it 'does not create a pipeline', :aggregate_failures do
+ expect(response).to be_error
+ expect(pipeline).not_to be_persisted
+ end
+ end
+ end
+
+ context 'with trigger variables' do
+ let(:pipeline) do
+ execute_service do |pipeline|
+ pipeline.variables.build(variables)
+ end.payload
+ end
+
+ let(:config) do
+ <<-EOY
+ workflow:
+ rules:
+ - if: $SOME_VARIABLE
+
+ regular-job:
+ script: 'echo Hello, World!'
+ EOY
+ end
+
+ context 'with matches' do
+ let(:variables) do
+ [{ key: 'SOME_VARIABLE', secret_value: 'SOME_VAR' }]
+ end
+
+ it 'creates a pipeline' do
+ expect(pipeline).to be_persisted
+ expect(build_names).to contain_exactly('regular-job')
+ end
+
+ context 'when a job requires the same variable' do
+ let(:config) do
+ <<-EOY
+ workflow:
+ rules:
+ - if: $SOME_VARIABLE
+
+ build:
+ stage: build
+ script: 'echo build'
+ rules:
+ - if: $SOME_VARIABLE
+
+ test1:
+ stage: test
+ script: 'echo test1'
+ needs: [build]
+
+ test2:
+ stage: test
+ script: 'echo test2'
+ EOY
+ end
+
+ it 'creates a pipeline' do
+ expect(pipeline).to be_persisted
+ expect(build_names).to contain_exactly('build', 'test1', 'test2')
+ end
+ end
+ end
+
+ context 'with no matches' do
+ let(:variables) { {} }
+
+ it 'does not create a pipeline', :aggregate_failures do
+ expect(response).to be_error
+ expect(pipeline).not_to be_persisted
+ end
+
+ context 'when a job requires the same variable' do
+ let(:config) do
+ <<-EOY
+ workflow:
+ rules:
+ - if: $SOME_VARIABLE
+
+ build:
+ stage: build
+ script: 'echo build'
+ rules:
+ - if: $SOME_VARIABLE
+
+ test1:
+ stage: test
+ script: 'echo test1'
+ needs: [build]
+
+ test2:
+ stage: test
+ script: 'echo test2'
+ EOY
+ end
+
+ it 'does not create a pipeline', :aggregate_failures do
+ expect(response).to be_error
+ expect(pipeline).not_to be_persisted
+ end
+ end
+ end
+ end
+
+ context 'changes' do
+ shared_examples 'comparing file changes with workflow rules' do
+ context 'when matches' do
+ before do
+ allow_next_instance_of(Ci::Pipeline) do |pipeline|
+ allow(pipeline).to receive(:modified_paths).and_return(%w[file1.md])
+ end
+ end
+
+ it 'creates the pipeline with a job' do
+ expect(pipeline).to be_persisted
+ expect(build_names).to contain_exactly('job')
+ end
+ end
+
+ context 'when does not match' do
+ before do
+ allow_next_instance_of(Ci::Pipeline) do |pipeline|
+ allow(pipeline).to receive(:modified_paths).and_return(%w[unknown])
+ end
+ end
+
+ it 'does not create the pipeline' do
+ expect(pipeline.errors.full_messages).to eq(['Pipeline filtered out by workflow rules.'])
+ expect(response).to be_error
+ expect(pipeline).not_to be_persisted
+ end
+ end
+ end
+
+ context 'changes is an array' do
+ let(:config) do
+ <<-EOY
+ workflow:
+ rules:
+ - changes: [file1.md]
+
+ job:
+ script: exit 0
+ EOY
+ end
+
+ it_behaves_like 'comparing file changes with workflow rules'
+ end
+
+ context 'changes:paths is an array' do
+ let(:config) do
+ <<-EOY
+ workflow:
+ rules:
+ - changes:
+ paths: [file1.md]
+
+ job:
+ script: exit 0
+ EOY
+ end
+
+ it_behaves_like 'comparing file changes with workflow rules'
+ end
+ end
end
end
diff --git a/spec/services/ci/create_pipeline_service_spec.rb b/spec/services/ci/create_pipeline_service_spec.rb
index 9cef7f7dadb..a9442b0dc68 100644
--- a/spec/services/ci/create_pipeline_service_spec.rb
+++ b/spec/services/ci/create_pipeline_service_spec.rb
@@ -18,6 +18,7 @@ RSpec.describe Ci::CreatePipelineService do
# rubocop:disable Metrics/ParameterLists
def execute_service(
source: :push,
+ before: '00000000',
after: project.commit.id,
ref: ref_name,
trigger_request: nil,
@@ -29,7 +30,7 @@ RSpec.describe Ci::CreatePipelineService do
target_sha: nil,
save_on_errors: true)
params = { ref: ref,
- before: '00000000',
+ before: before,
after: after,
variables_attributes: variables_attributes,
push_options: push_options,
@@ -1865,818 +1866,6 @@ RSpec.describe Ci::CreatePipelineService do
end
end
end
-
- context 'when rules are used' do
- let(:ref_name) { 'refs/heads/master' }
- let(:response) { execute_service }
- let(:pipeline) { response.payload }
- let(:build_names) { pipeline.builds.pluck(:name) }
- let(:regular_job) { find_job('regular-job') }
- let(:rules_job) { find_job('rules-job') }
- let(:delayed_job) { find_job('delayed-job') }
-
- context 'with when:manual' do
- let(:config) do
- <<-EOY
- job-with-rules:
- script: 'echo hey'
- rules:
- - if: $CI_COMMIT_REF_NAME =~ /master/
-
- job-when-with-rules:
- script: 'echo hey'
- when: manual
- rules:
- - if: $CI_COMMIT_REF_NAME =~ /master/
-
- job-when-with-rules-when:
- script: 'echo hey'
- when: manual
- rules:
- - if: $CI_COMMIT_REF_NAME =~ /master/
- when: on_success
-
- job-with-rules-when:
- script: 'echo hey'
- rules:
- - if: $CI_COMMIT_REF_NAME =~ /master/
- when: manual
-
- job-without-rules:
- script: 'echo this is a job with NO rules'
- EOY
- end
-
- let(:job_with_rules) { find_job('job-with-rules') }
- let(:job_when_with_rules) { find_job('job-when-with-rules') }
- let(:job_when_with_rules_when) { find_job('job-when-with-rules-when') }
- let(:job_with_rules_when) { find_job('job-with-rules-when') }
- let(:job_without_rules) { find_job('job-without-rules') }
-
- context 'when matching the rules' do
- let(:ref_name) { 'refs/heads/master' }
-
- it 'adds the job-with-rules with a when:manual' do
- expect(job_with_rules).to be_persisted
- expect(job_when_with_rules).to be_persisted
- expect(job_when_with_rules_when).to be_persisted
- expect(job_with_rules_when).to be_persisted
- expect(job_without_rules).to be_persisted
-
- expect(job_with_rules.when).to eq('on_success')
- expect(job_when_with_rules.when).to eq('manual')
- expect(job_when_with_rules_when.when).to eq('on_success')
- expect(job_with_rules_when.when).to eq('manual')
- expect(job_without_rules.when).to eq('on_success')
- end
- end
-
- context 'when there is no match to the rule' do
- let(:ref_name) { 'refs/heads/wip' }
-
- it 'does not add job_with_rules' do
- expect(job_with_rules).to be_nil
- expect(job_when_with_rules).to be_nil
- expect(job_when_with_rules_when).to be_nil
- expect(job_with_rules_when).to be_nil
- expect(job_without_rules).to be_persisted
- end
- end
- end
-
- shared_examples 'rules jobs are excluded' do
- it 'only persists the job without rules' do
- expect(pipeline).to be_persisted
- expect(regular_job).to be_persisted
- expect(rules_job).to be_nil
- expect(delayed_job).to be_nil
- end
- end
-
- def find_job(name)
- pipeline.builds.find_by(name: name)
- end
-
- before do
- stub_ci_pipeline_yaml_file(config)
- allow_any_instance_of(Ci::BuildScheduleWorker).to receive(:perform).and_return(true)
- end
-
- context 'with simple if: clauses' do
- let(:config) do
- <<-EOY
- regular-job:
- script: 'echo Hello, World!'
-
- master-job:
- script: "echo hello world, $CI_COMMIT_REF_NAME"
- rules:
- - if: $CI_COMMIT_REF_NAME == "nonexistant-branch"
- when: never
- - if: $CI_COMMIT_REF_NAME =~ /master/
- when: manual
-
- negligible-job:
- script: "exit 1"
- rules:
- - if: $CI_COMMIT_REF_NAME =~ /master/
- allow_failure: true
-
- delayed-job:
- script: "echo See you later, World!"
- rules:
- - if: $CI_COMMIT_REF_NAME =~ /master/
- when: delayed
- start_in: 1 hour
-
- never-job:
- script: "echo Goodbye, World!"
- rules:
- - if: $CI_COMMIT_REF_NAME
- when: never
- EOY
- end
-
- context 'with matches' do
- it 'creates a pipeline with the vanilla and manual jobs' do
- expect(pipeline).to be_persisted
- expect(build_names).to contain_exactly(
- 'regular-job', 'delayed-job', 'master-job', 'negligible-job'
- )
- end
-
- it 'assigns job:when values to the builds' do
- expect(find_job('regular-job').when).to eq('on_success')
- expect(find_job('master-job').when).to eq('manual')
- expect(find_job('negligible-job').when).to eq('on_success')
- expect(find_job('delayed-job').when).to eq('delayed')
- end
-
- it 'assigns job:allow_failure values to the builds' do
- expect(find_job('regular-job').allow_failure).to eq(false)
- expect(find_job('master-job').allow_failure).to eq(false)
- expect(find_job('negligible-job').allow_failure).to eq(true)
- expect(find_job('delayed-job').allow_failure).to eq(false)
- end
-
- it 'assigns start_in for delayed jobs' do
- expect(delayed_job.options[:start_in]).to eq('1 hour')
- end
- end
-
- context 'with no matches' do
- let(:ref_name) { 'refs/heads/feature' }
-
- it_behaves_like 'rules jobs are excluded'
- end
- end
-
- context 'with complex if: clauses' do
- let(:config) do
- <<-EOY
- regular-job:
- script: 'echo Hello, World!'
- rules:
- - if: $VAR == 'present' && $OTHER || $CI_COMMIT_REF_NAME
- when: manual
- allow_failure: true
- EOY
- end
-
- it 'matches the first rule' do
- expect(pipeline).to be_persisted
- expect(build_names).to contain_exactly('regular-job')
- expect(regular_job.when).to eq('manual')
- expect(regular_job.allow_failure).to eq(true)
- end
- end
-
- context 'with changes:' do
- let(:config) do
- <<-EOY
- regular-job:
- script: 'echo Hello, World!'
-
- rules-job:
- script: "echo hello world, $CI_COMMIT_REF_NAME"
- rules:
- - changes:
- - README.md
- when: manual
- - changes:
- - app.rb
- when: on_success
-
- delayed-job:
- script: "echo See you later, World!"
- rules:
- - changes:
- - README.md
- when: delayed
- start_in: 4 hours
-
- negligible-job:
- script: "can be failed sometimes"
- rules:
- - changes:
- - README.md
- allow_failure: true
-
- README:
- script: "I use variables for changes!"
- rules:
- - changes:
- - $CI_JOB_NAME*
-
- changes-paths:
- script: "I am using a new syntax!"
- rules:
- - changes:
- paths: [README.md]
- EOY
- end
-
- context 'and matches' do
- before do
- allow_any_instance_of(Ci::Pipeline)
- .to receive(:modified_paths).and_return(%w[README.md])
- end
-
- it 'creates five jobs' do
- expect(pipeline).to be_persisted
- expect(build_names).to contain_exactly(
- 'regular-job', 'rules-job', 'delayed-job', 'negligible-job', 'README', 'changes-paths'
- )
- end
-
- it 'sets when: for all jobs' do
- expect(regular_job.when).to eq('on_success')
- expect(rules_job.when).to eq('manual')
- expect(delayed_job.when).to eq('delayed')
- expect(delayed_job.options[:start_in]).to eq('4 hours')
- end
-
- it 'sets allow_failure: for negligible job' do
- expect(find_job('negligible-job').allow_failure).to eq(true)
- end
- end
-
- context 'and matches the second rule' do
- before do
- allow_any_instance_of(Ci::Pipeline)
- .to receive(:modified_paths).and_return(%w[app.rb])
- end
-
- it 'includes both jobs' do
- expect(pipeline).to be_persisted
- expect(build_names).to contain_exactly('regular-job', 'rules-job')
- end
-
- it 'sets when: for the created rules job based on the second clause' do
- expect(regular_job.when).to eq('on_success')
- expect(rules_job.when).to eq('on_success')
- end
- end
-
- context 'and does not match' do
- before do
- allow_any_instance_of(Ci::Pipeline)
- .to receive(:modified_paths).and_return(%w[useless_script.rb])
- end
-
- it_behaves_like 'rules jobs are excluded'
-
- it 'sets when: for the created job' do
- expect(regular_job.when).to eq('on_success')
- end
- end
- end
-
- context 'with mixed if: and changes: rules' do
- let(:config) do
- <<-EOY
- regular-job:
- script: 'echo Hello, World!'
-
- rules-job:
- script: "echo hello world, $CI_COMMIT_REF_NAME"
- allow_failure: true
- rules:
- - changes:
- - README.md
- when: manual
- - if: $CI_COMMIT_REF_NAME == "master"
- when: on_success
- allow_failure: false
-
- delayed-job:
- script: "echo See you later, World!"
- rules:
- - changes:
- - README.md
- when: delayed
- start_in: 4 hours
- allow_failure: true
- - if: $CI_COMMIT_REF_NAME == "master"
- when: delayed
- start_in: 1 hour
- EOY
- end
-
- context 'and changes: matches before if' do
- before do
- allow_any_instance_of(Ci::Pipeline)
- .to receive(:modified_paths).and_return(%w[README.md])
- end
-
- it 'creates two jobs' do
- expect(pipeline).to be_persisted
- expect(build_names)
- .to contain_exactly('regular-job', 'rules-job', 'delayed-job')
- end
-
- it 'sets when: for all jobs' do
- expect(regular_job.when).to eq('on_success')
- expect(rules_job.when).to eq('manual')
- expect(delayed_job.when).to eq('delayed')
- expect(delayed_job.options[:start_in]).to eq('4 hours')
- end
-
- it 'sets allow_failure: for all jobs' do
- expect(regular_job.allow_failure).to eq(false)
- expect(rules_job.allow_failure).to eq(true)
- expect(delayed_job.allow_failure).to eq(true)
- end
- end
-
- context 'and if: matches after changes' do
- it 'includes both jobs' do
- expect(pipeline).to be_persisted
- expect(build_names).to contain_exactly('regular-job', 'rules-job', 'delayed-job')
- end
-
- it 'sets when: for the created rules job based on the second clause' do
- expect(regular_job.when).to eq('on_success')
- expect(rules_job.when).to eq('on_success')
- expect(delayed_job.when).to eq('delayed')
- expect(delayed_job.options[:start_in]).to eq('1 hour')
- end
- end
-
- context 'and does not match' do
- let(:ref_name) { 'refs/heads/wip' }
-
- it_behaves_like 'rules jobs are excluded'
-
- it 'sets when: for the created job' do
- expect(regular_job.when).to eq('on_success')
- end
- end
- end
-
- context 'with mixed if: and changes: clauses' do
- let(:config) do
- <<-EOY
- regular-job:
- script: 'echo Hello, World!'
-
- rules-job:
- script: "echo hello world, $CI_COMMIT_REF_NAME"
- rules:
- - if: $CI_COMMIT_REF_NAME =~ /master/
- changes: [README.md]
- when: on_success
- allow_failure: true
- - if: $CI_COMMIT_REF_NAME =~ /master/
- changes: [app.rb]
- when: manual
- EOY
- end
-
- context 'with if matches and changes matches' do
- before do
- allow_any_instance_of(Ci::Pipeline)
- .to receive(:modified_paths).and_return(%w[app.rb])
- end
-
- it 'persists all jobs' do
- expect(pipeline).to be_persisted
- expect(regular_job).to be_persisted
- expect(rules_job).to be_persisted
- expect(rules_job.when).to eq('manual')
- expect(rules_job.allow_failure).to eq(false)
- end
- end
-
- context 'with if matches and no change matches' do
- it_behaves_like 'rules jobs are excluded'
- end
-
- context 'with change matches and no if matches' do
- let(:ref_name) { 'refs/heads/feature' }
-
- before do
- allow_any_instance_of(Ci::Pipeline)
- .to receive(:modified_paths).and_return(%w[README.md])
- end
-
- it_behaves_like 'rules jobs are excluded'
- end
-
- context 'and no matches' do
- let(:ref_name) { 'refs/heads/feature' }
-
- it_behaves_like 'rules jobs are excluded'
- end
- end
-
- context 'with complex if: allow_failure usages' do
- let(:config) do
- <<-EOY
- job-1:
- script: "exit 1"
- allow_failure: true
- rules:
- - if: $CI_COMMIT_REF_NAME =~ /master/
- allow_failure: false
-
- job-2:
- script: "exit 1"
- allow_failure: true
- rules:
- - if: $CI_COMMIT_REF_NAME =~ /nonexistant-branch/
- allow_failure: false
-
- job-3:
- script: "exit 1"
- rules:
- - if: $CI_COMMIT_REF_NAME =~ /nonexistant-branch/
- allow_failure: true
-
- job-4:
- script: "exit 1"
- rules:
- - if: $CI_COMMIT_REF_NAME =~ /master/
- allow_failure: false
-
- job-5:
- script: "exit 1"
- allow_failure: false
- rules:
- - if: $CI_COMMIT_REF_NAME =~ /master/
- allow_failure: true
-
- job-6:
- script: "exit 1"
- rules:
- - if: $CI_COMMIT_REF_NAME =~ /nonexistant-branch/
- allow_failure: false
- - allow_failure: true
- EOY
- end
-
- it 'creates a pipeline' do
- expect(pipeline).to be_persisted
- expect(build_names).to contain_exactly('job-1', 'job-4', 'job-5', 'job-6')
- end
-
- it 'assigns job:allow_failure values to the builds' do
- expect(find_job('job-1').allow_failure).to eq(false)
- expect(find_job('job-4').allow_failure).to eq(false)
- expect(find_job('job-5').allow_failure).to eq(true)
- expect(find_job('job-6').allow_failure).to eq(true)
- end
- end
-
- context 'with complex if: allow_failure & when usages' do
- let(:config) do
- <<-EOY
- job-1:
- script: "exit 1"
- rules:
- - if: $CI_COMMIT_REF_NAME =~ /master/
- when: manual
-
- job-2:
- script: "exit 1"
- rules:
- - if: $CI_COMMIT_REF_NAME =~ /master/
- when: manual
- allow_failure: true
-
- job-3:
- script: "exit 1"
- allow_failure: true
- rules:
- - if: $CI_COMMIT_REF_NAME =~ /master/
- when: manual
-
- job-4:
- script: "exit 1"
- allow_failure: true
- rules:
- - if: $CI_COMMIT_REF_NAME =~ /master/
- when: manual
- allow_failure: false
-
- job-5:
- script: "exit 1"
- rules:
- - if: $CI_COMMIT_REF_NAME =~ /nonexistant-branch/
- when: manual
- allow_failure: false
- - when: always
- allow_failure: true
-
- job-6:
- script: "exit 1"
- allow_failure: false
- rules:
- - if: $CI_COMMIT_REF_NAME =~ /master/
- when: manual
-
- job-7:
- script: "exit 1"
- allow_failure: false
- rules:
- - if: $CI_COMMIT_REF_NAME =~ /nonexistant-branch/
- when: manual
- - when: :on_failure
- allow_failure: true
- EOY
- end
-
- it 'creates a pipeline' do
- expect(pipeline).to be_persisted
- expect(build_names).to contain_exactly(
- 'job-1', 'job-2', 'job-3', 'job-4', 'job-5', 'job-6', 'job-7'
- )
- end
-
- it 'assigns job:allow_failure values to the builds' do
- expect(find_job('job-1').allow_failure).to eq(false)
- expect(find_job('job-2').allow_failure).to eq(true)
- expect(find_job('job-3').allow_failure).to eq(true)
- expect(find_job('job-4').allow_failure).to eq(false)
- expect(find_job('job-5').allow_failure).to eq(true)
- expect(find_job('job-6').allow_failure).to eq(false)
- expect(find_job('job-7').allow_failure).to eq(true)
- end
-
- it 'assigns job:when values to the builds' do
- expect(find_job('job-1').when).to eq('manual')
- expect(find_job('job-2').when).to eq('manual')
- expect(find_job('job-3').when).to eq('manual')
- expect(find_job('job-4').when).to eq('manual')
- expect(find_job('job-5').when).to eq('always')
- expect(find_job('job-6').when).to eq('manual')
- expect(find_job('job-7').when).to eq('on_failure')
- end
- end
-
- context 'with deploy freeze period `if:` clause' do
- # '0 23 * * 5' == "At 23:00 on Friday."", '0 7 * * 1' == "At 07:00 on Monday.""
- let!(:freeze_period) { create(:ci_freeze_period, project: project, freeze_start: '0 23 * * 5', freeze_end: '0 7 * * 1') }
-
- context 'with 2 jobs' do
- let(:config) do
- <<-EOY
- stages:
- - test
- - deploy
-
- test-job:
- script:
- - echo 'running TEST stage'
-
- deploy-job:
- stage: deploy
- script:
- - echo 'running DEPLOY stage'
- rules:
- - if: $CI_DEPLOY_FREEZE == null
- EOY
- end
-
- context 'when outside freeze period' do
- it 'creates two jobs' do
- Timecop.freeze(2020, 4, 10, 22, 59) do
- expect(pipeline).to be_persisted
- expect(build_names).to contain_exactly('test-job', 'deploy-job')
- end
- end
- end
-
- context 'when inside freeze period' do
- it 'creates one job' do
- Timecop.freeze(2020, 4, 10, 23, 1) do
- expect(pipeline).to be_persisted
- expect(build_names).to contain_exactly('test-job')
- end
- end
- end
- end
-
- context 'with 1 job' do
- let(:config) do
- <<-EOY
- stages:
- - deploy
-
- deploy-job:
- stage: deploy
- script:
- - echo 'running DEPLOY stage'
- rules:
- - if: $CI_DEPLOY_FREEZE == null
- EOY
- end
-
- context 'when outside freeze period' do
- it 'creates two jobs' do
- Timecop.freeze(2020, 4, 10, 22, 59) do
- expect(pipeline).to be_persisted
- expect(build_names).to contain_exactly('deploy-job')
- end
- end
- end
-
- context 'when inside freeze period' do
- it 'does not create the pipeline', :aggregate_failures do
- Timecop.freeze(2020, 4, 10, 23, 1) do
- expect(response).to be_error
- expect(pipeline).not_to be_persisted
- end
- end
- end
- end
- end
-
- context 'with workflow rules with persisted variables' do
- let(:config) do
- <<-EOY
- workflow:
- rules:
- - if: $CI_COMMIT_REF_NAME == "master"
-
- regular-job:
- script: 'echo Hello, World!'
- EOY
- end
-
- context 'with matches' do
- it 'creates a pipeline' do
- expect(pipeline).to be_persisted
- expect(build_names).to contain_exactly('regular-job')
- end
- end
-
- context 'with no matches' do
- let(:ref_name) { 'refs/heads/feature' }
-
- it 'does not create a pipeline', :aggregate_failures do
- expect(response).to be_error
- expect(pipeline).not_to be_persisted
- end
- end
- end
-
- context 'with workflow rules with pipeline variables' do
- let(:pipeline) do
- execute_service(variables_attributes: variables_attributes).payload
- end
-
- let(:config) do
- <<-EOY
- workflow:
- rules:
- - if: $SOME_VARIABLE
-
- regular-job:
- script: 'echo Hello, World!'
- EOY
- end
-
- context 'with matches' do
- let(:variables_attributes) do
- [{ key: 'SOME_VARIABLE', secret_value: 'SOME_VAR' }]
- end
-
- it 'creates a pipeline' do
- expect(pipeline).to be_persisted
- expect(build_names).to contain_exactly('regular-job')
- end
- end
-
- context 'with no matches' do
- let(:variables_attributes) { {} }
-
- it 'does not create a pipeline', :aggregate_failures do
- expect(response).to be_error
- expect(pipeline).not_to be_persisted
- end
- end
- end
-
- context 'with workflow rules with trigger variables' do
- let(:pipeline) do
- execute_service do |pipeline|
- pipeline.variables.build(variables)
- end.payload
- end
-
- let(:config) do
- <<-EOY
- workflow:
- rules:
- - if: $SOME_VARIABLE
-
- regular-job:
- script: 'echo Hello, World!'
- EOY
- end
-
- context 'with matches' do
- let(:variables) do
- [{ key: 'SOME_VARIABLE', secret_value: 'SOME_VAR' }]
- end
-
- it 'creates a pipeline' do
- expect(pipeline).to be_persisted
- expect(build_names).to contain_exactly('regular-job')
- end
-
- context 'when a job requires the same variable' do
- let(:config) do
- <<-EOY
- workflow:
- rules:
- - if: $SOME_VARIABLE
-
- build:
- stage: build
- script: 'echo build'
- rules:
- - if: $SOME_VARIABLE
-
- test1:
- stage: test
- script: 'echo test1'
- needs: [build]
-
- test2:
- stage: test
- script: 'echo test2'
- EOY
- end
-
- it 'creates a pipeline' do
- expect(pipeline).to be_persisted
- expect(build_names).to contain_exactly('build', 'test1', 'test2')
- end
- end
- end
-
- context 'with no matches' do
- let(:variables) { {} }
-
- it 'does not create a pipeline', :aggregate_failures do
- expect(response).to be_error
- expect(pipeline).not_to be_persisted
- end
-
- context 'when a job requires the same variable' do
- let(:config) do
- <<-EOY
- workflow:
- rules:
- - if: $SOME_VARIABLE
-
- build:
- stage: build
- script: 'echo build'
- rules:
- - if: $SOME_VARIABLE
-
- test1:
- stage: test
- script: 'echo test1'
- needs: [build]
-
- test2:
- stage: test
- script: 'echo test2'
- EOY
- end
-
- it 'does not create a pipeline', :aggregate_failures do
- expect(response).to be_error
- expect(pipeline).not_to be_persisted
- end
- end
- end
- end
- end
end
describe '#execute!' do
diff --git a/spec/services/ci/deployments/destroy_service_spec.rb b/spec/services/ci/deployments/destroy_service_spec.rb
new file mode 100644
index 00000000000..60a57c05728
--- /dev/null
+++ b/spec/services/ci/deployments/destroy_service_spec.rb
@@ -0,0 +1,65 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ::Ci::Deployments::DestroyService do
+ let_it_be(:project) { create(:project, :repository) }
+
+ let(:environment) { create(:environment, project: project) }
+ let(:commits) { project.repository.commits(nil, { limit: 3 }) }
+ let!(:deploy) do
+ create(
+ :deployment,
+ :success,
+ project: project,
+ environment: environment,
+ deployable: nil,
+ sha: commits[2].sha
+ )
+ end
+
+ let!(:running_deploy) do
+ create(
+ :deployment,
+ :running,
+ project: project,
+ environment: environment,
+ deployable: nil,
+ sha: commits[1].sha
+ )
+ end
+
+ let!(:old_deploy) do
+ create(
+ :deployment,
+ :success,
+ project: project,
+ environment: environment,
+ deployable: nil,
+ sha: commits[0].sha,
+ finished_at: 1.year.ago
+ )
+ end
+
+ let(:user) { project.first_owner }
+
+ subject { described_class.new(project, user) }
+
+ context 'when deleting a deployment' do
+ it 'accepts deletion of an old deployment' do
+ expect(subject.execute(old_deploy)).to be_success
+ end
+
+ it 'does not delete a running deployment' do
+ response = subject.execute(running_deploy)
+ expect(response).to be_an_error
+ expect(response.message).to eq("Cannot destroy running deployment")
+ end
+
+ it 'does not delete the last deployment' do
+ response = subject.execute(deploy)
+ expect(response).to be_an_error
+ expect(response.message).to eq("Deployment currently deployed to environment")
+ end
+ end
+end
diff --git a/spec/services/ci/destroy_pipeline_service_spec.rb b/spec/services/ci/destroy_pipeline_service_spec.rb
index 045051c7152..6bd7fe7559c 100644
--- a/spec/services/ci/destroy_pipeline_service_spec.rb
+++ b/spec/services/ci/destroy_pipeline_service_spec.rb
@@ -90,15 +90,23 @@ RSpec.describe ::Ci::DestroyPipelineService do
end
end
- context 'when pipeline is in cancelable state' do
- before do
- allow(pipeline).to receive(:cancelable?).and_return(true)
- end
+ context 'when pipeline is in cancelable state', :sidekiq_inline do
+ let!(:build) { create(:ci_build, :running, pipeline: pipeline) }
+ let!(:child_pipeline) { create(:ci_pipeline, :running, child_of: pipeline) }
+ let!(:child_build) { create(:ci_build, :running, pipeline: child_pipeline) }
+
+ it 'cancels the pipelines synchronously' do
+ # turn off deletion for all instances of pipeline to allow for testing cancellation
+ allow(pipeline).to receive_message_chain(:reset, :destroy!)
+ allow_next_found_instance_of(Ci::Pipeline) { |p| allow(p).to receive_message_chain(:reset, :destroy!) }
- it 'cancels the pipeline' do
- expect(pipeline).to receive(:cancel_running)
+ # ensure cancellation happens synchronously so that we accumulate minutes
+ expect(::Ci::CancelPipelineWorker).not_to receive(:perform)
subject
+
+ expect(build.reload.status).to eq('canceled')
+ expect(child_build.reload.status).to eq('canceled')
end
end
end
diff --git a/spec/services/ci/job_artifacts/create_service_spec.rb b/spec/services/ci/job_artifacts/create_service_spec.rb
index b7a810ce47e..7b3f67b192f 100644
--- a/spec/services/ci/job_artifacts/create_service_spec.rb
+++ b/spec/services/ci/job_artifacts/create_service_spec.rb
@@ -34,6 +34,14 @@ RSpec.describe Ci::JobArtifacts::CreateService do
subject { service.execute(artifacts_file, params, metadata_file: metadata_file) }
context 'when artifacts file is uploaded' do
+ it 'logs the created artifact' do
+ expect(Gitlab::Ci::Artifacts::Logger)
+ .to receive(:log_created)
+ .with(an_instance_of(Ci::JobArtifact))
+
+ subject
+ end
+
it 'returns artifact in the response' do
response = subject
new_artifact = job.job_artifacts.last
diff --git a/spec/services/ci/job_artifacts/destroy_batch_service_spec.rb b/spec/services/ci/job_artifacts/destroy_batch_service_spec.rb
index 05069054483..9ca39d4d32e 100644
--- a/spec/services/ci/job_artifacts/destroy_batch_service_spec.rb
+++ b/spec/services/ci/job_artifacts/destroy_batch_service_spec.rb
@@ -40,7 +40,14 @@ RSpec.describe Ci::JobArtifacts::DestroyBatchService do
expect { execute }.not_to change { artifact_with_file.file.exists? }
end
- it 'deletes the artifact records' do
+ it 'deletes the artifact records and logs them' do
+ expect(Gitlab::Ci::Artifacts::Logger)
+ .to receive(:log_deleted)
+ .with(
+ match_array([artifact_with_file, artifact_without_file]),
+ 'Ci::JobArtifacts::DestroyBatchService#execute'
+ )
+
expect { subject }.to change { Ci::JobArtifact.count }.by(-2)
end
diff --git a/spec/services/ci/list_config_variables_service_spec.rb b/spec/services/ci/list_config_variables_service_spec.rb
index 1735f4cfc97..4953b18bfcc 100644
--- a/spec/services/ci/list_config_variables_service_spec.rb
+++ b/spec/services/ci/list_config_variables_service_spec.rb
@@ -40,8 +40,8 @@ RSpec.describe Ci::ListConfigVariablesService, :use_clean_rails_memory_store_cac
it 'returns variable list' do
expect(subject['KEY1']).to eq({ value: 'val 1', description: 'description 1' })
expect(subject['KEY2']).to eq({ value: 'val 2', description: '' })
- expect(subject['KEY3']).to eq({ value: 'val 3', description: nil })
- expect(subject['KEY4']).to eq({ value: 'val 4', description: nil })
+ expect(subject['KEY3']).to eq({ value: 'val 3' })
+ expect(subject['KEY4']).to eq({ value: 'val 4' })
end
end
diff --git a/spec/services/ci/parse_dotenv_artifact_service_spec.rb b/spec/services/ci/parse_dotenv_artifact_service_spec.rb
index aaab849cd93..7b3af33ac72 100644
--- a/spec/services/ci/parse_dotenv_artifact_service_spec.rb
+++ b/spec/services/ci/parse_dotenv_artifact_service_spec.rb
@@ -292,7 +292,7 @@ RSpec.describe Ci::ParseDotenvArtifactService do
end
context 'when build does not have a dotenv artifact' do
- let!(:artifact) { }
+ let!(:artifact) {}
it 'raises an error' do
expect { subject }.to raise_error(ArgumentError)
diff --git a/spec/services/ci/pipeline_processing/atomic_processing_service/status_collection_spec.rb b/spec/services/ci/pipeline_processing/atomic_processing_service/status_collection_spec.rb
index 7868629d34d..289e004fcce 100644
--- a/spec/services/ci/pipeline_processing/atomic_processing_service/status_collection_spec.rb
+++ b/spec/services/ci/pipeline_processing/atomic_processing_service/status_collection_spec.rb
@@ -87,7 +87,7 @@ RSpec.describe Ci::PipelineProcessing::AtomicProcessingService::StatusCollection
describe '#processing_processables' do
it 'returns processables marked as processing' do
- expect(collection.processing_processables.map { |processable| processable[:id]} )
+ expect(collection.processing_processables.map { |processable| processable[:id] } )
.to contain_exactly(build_a.id, build_b.id, test_a.id, test_b.id, deploy.id)
end
end
diff --git a/spec/services/ci/process_build_service_spec.rb b/spec/services/ci/process_build_service_spec.rb
index b54fc45d36a..2fcb4ce73ff 100644
--- a/spec/services/ci/process_build_service_spec.rb
+++ b/spec/services/ci/process_build_service_spec.rb
@@ -101,7 +101,7 @@ RSpec.describe Ci::ProcessBuildService, '#execute' do
context 'when build has delayed option' do
before do
- allow(Ci::BuildScheduleWorker).to receive(:perform_at) { }
+ allow(Ci::BuildScheduleWorker).to receive(:perform_at) {}
end
let(:build) { create(:ci_build, :created, :schedulable, user: user, project: project) }
diff --git a/spec/services/ci/register_job_service_spec.rb b/spec/services/ci/register_job_service_spec.rb
index 2316575f164..cabd60a22d1 100644
--- a/spec/services/ci/register_job_service_spec.rb
+++ b/spec/services/ci/register_job_service_spec.rb
@@ -129,6 +129,12 @@ module Ci
let!(:build2_project2) { create(:ci_build, :pending, :queued, pipeline: pipeline2) }
let!(:build1_project3) { create(:ci_build, :pending, :queued, pipeline: pipeline3) }
+ it 'picks builds one-by-one' do
+ expect(Ci::Build).to receive(:find).with(pending_job.id).and_call_original
+
+ expect(execute(shared_runner)).to eq(build1_project1)
+ end
+
context 'when using fair scheduling' do
context 'when all builds are pending' do
it 'prefers projects without builds first' do
@@ -485,6 +491,48 @@ module Ci
end
context 'when "dependencies" keyword is specified' do
+ let!(:pre_stage_job) do
+ create(:ci_build, :success, :artifacts, pipeline: pipeline, name: 'test', stage_idx: 0)
+ end
+
+ let!(:pending_job) do
+ create(:ci_build, :pending, :queued,
+ pipeline: pipeline, stage_idx: 1,
+ options: { script: ["bash"], dependencies: dependencies })
+ end
+
+ let(:dependencies) { %w[test] }
+
+ subject { execute(specific_runner) }
+
+ it 'picks a build with a dependency' do
+ picked_build = execute(specific_runner)
+
+ expect(picked_build).to be_present
+ end
+
+ context 'when there are multiple dependencies with artifacts' do
+ let!(:pre_stage_job_second) do
+ create(:ci_build, :success, :artifacts, pipeline: pipeline, name: 'deploy', stage_idx: 0)
+ end
+
+ let(:dependencies) { %w[test deploy] }
+
+ it 'logs build artifacts size' do
+ execute(specific_runner)
+
+ artifacts_size = [pre_stage_job, pre_stage_job_second].sum do |job|
+ job.job_artifacts_archive.size
+ end
+
+ expect(artifacts_size).to eq 107464 * 2
+ expect(Gitlab::ApplicationContext.current).to include({
+ 'meta.artifacts_dependencies_size' => artifacts_size,
+ 'meta.artifacts_dependencies_count' => 2
+ })
+ end
+ end
+
shared_examples 'not pick' do
it 'does not pick the build and drops the build' do
expect(subject).to be_nil
@@ -572,16 +620,6 @@ module Ci
end
end
- let!(:pre_stage_job) { create(:ci_build, :success, pipeline: pipeline, name: 'test', stage_idx: 0) }
-
- let!(:pending_job) do
- create(:ci_build, :pending, :queued,
- pipeline: pipeline, stage_idx: 1,
- options: { script: ["bash"], dependencies: ['test'] })
- end
-
- subject { execute(specific_runner) }
-
it_behaves_like 'validation is active'
end
@@ -739,16 +777,6 @@ module Ci
end
end
- context 'when a long queue is created' do
- it 'picks builds one-by-one' do
- expect(Ci::Build).to receive(:find).with(pending_job.id).and_call_original
-
- expect(execute(specific_runner)).to eq(pending_job)
- end
-
- include_examples 'handles runner assignment'
- end
-
context 'when using pending builds table' do
include_examples 'handles runner assignment'
diff --git a/spec/services/ci/retry_job_service_spec.rb b/spec/services/ci/retry_job_service_spec.rb
index f042471bd1f..b14e4187c7a 100644
--- a/spec/services/ci/retry_job_service_spec.rb
+++ b/spec/services/ci/retry_job_service_spec.rb
@@ -17,6 +17,7 @@ RSpec.describe Ci::RetryJobService do
name: 'test')
end
+ let(:job_variables_attributes) { [{ key: 'MANUAL_VAR', value: 'manual test var' }] }
let(:user) { developer }
let(:service) { described_class.new(project, user) }
@@ -206,6 +207,14 @@ RSpec.describe Ci::RetryJobService do
include_context 'retryable bridge'
it_behaves_like 'clones the job'
+
+ context 'when given variables' do
+ let(:new_job) { service.clone!(job, variables: job_variables_attributes) }
+
+ it 'does not give variables to the new bridge' do
+ expect { new_job }.not_to raise_error
+ end
+ end
end
context 'when the job to be cloned is a build' do
@@ -250,6 +259,28 @@ RSpec.describe Ci::RetryJobService do
expect { new_job }.not_to change { Environment.count }
end
end
+
+ context 'when given variables' do
+ let(:new_job) { service.clone!(job, variables: job_variables_attributes) }
+
+ context 'when the build is actionable' do
+ let_it_be_with_refind(:job) { create(:ci_build, :actionable, pipeline: pipeline) }
+
+ it 'gives variables to the new build' do
+ expect(new_job.job_variables.count).to be(1)
+ expect(new_job.job_variables.first.key).to eq('MANUAL_VAR')
+ expect(new_job.job_variables.first.value).to eq('manual test var')
+ end
+ end
+
+ context 'when the build is not actionable' do
+ let_it_be_with_refind(:job) { create(:ci_build, pipeline: pipeline) }
+
+ it 'does not give variables to the new build' do
+ expect(new_job.job_variables.count).to be_zero
+ end
+ end
+ end
end
end
@@ -260,6 +291,14 @@ RSpec.describe Ci::RetryJobService do
include_context 'retryable bridge'
it_behaves_like 'retries the job'
+
+ context 'when given variables' do
+ let(:new_job) { service.clone!(job, variables: job_variables_attributes) }
+
+ it 'does not give variables to the new bridge' do
+ expect { new_job }.not_to raise_error
+ end
+ end
end
context 'when the job to be retried is a build' do
@@ -288,6 +327,28 @@ RSpec.describe Ci::RetryJobService do
expect { service.execute(job) }.not_to exceed_all_query_limit(control_count)
end
end
+
+ context 'when given variables' do
+ let(:new_job) { service.clone!(job, variables: job_variables_attributes) }
+
+ context 'when the build is actionable' do
+ let_it_be_with_refind(:job) { create(:ci_build, :actionable, pipeline: pipeline) }
+
+ it 'gives variables to the new build' do
+ expect(new_job.job_variables.count).to be(1)
+ expect(new_job.job_variables.first.key).to eq('MANUAL_VAR')
+ expect(new_job.job_variables.first.value).to eq('manual test var')
+ end
+ end
+
+ context 'when the build is not actionable' do
+ let_it_be_with_refind(:job) { create(:ci_build, pipeline: pipeline) }
+
+ it 'does not give variables to the new build' do
+ expect(new_job.job_variables.count).to be_zero
+ end
+ end
+ end
end
end
end
diff --git a/spec/services/ci/runners/assign_runner_service_spec.rb b/spec/services/ci/runners/assign_runner_service_spec.rb
index 00b176bb759..08bb99830fb 100644
--- a/spec/services/ci/runners/assign_runner_service_spec.rb
+++ b/spec/services/ci/runners/assign_runner_service_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe ::Ci::Runners::AssignRunnerService, '#execute' do
- subject { described_class.new(runner, project, user).execute }
+ subject(:execute) { described_class.new(runner, project, user).execute }
let_it_be(:runner) { create(:ci_runner, :project, projects: [project]) }
let_it_be(:project) { create(:project) }
@@ -11,30 +11,32 @@ RSpec.describe ::Ci::Runners::AssignRunnerService, '#execute' do
context 'without user' do
let(:user) { nil }
- it 'does not call assign_to on runner and returns false' do
+ it 'does not call assign_to on runner and returns error response', :aggregate_failures do
expect(runner).not_to receive(:assign_to)
- is_expected.to eq(false)
+ is_expected.to be_error
+ expect(execute.message).to eq('user not allowed to assign runner')
end
end
context 'with unauthorized user' do
let(:user) { build(:user) }
- it 'does not call assign_to on runner and returns false' do
+ it 'does not call assign_to on runner and returns error message' do
expect(runner).not_to receive(:assign_to)
- is_expected.to eq(false)
+ is_expected.to be_error
+ expect(execute.message).to eq('user not allowed to assign runner')
end
end
context 'with admin user', :enable_admin_mode do
let(:user) { create_default(:user, :admin) }
- it 'calls assign_to on runner and returns value unchanged' do
- expect(runner).to receive(:assign_to).with(project, user).once.and_return('assign_to return value')
+ it 'calls assign_to on runner and returns success response' do
+ expect(runner).to receive(:assign_to).with(project, user).once.and_call_original
- is_expected.to eq('assign_to return value')
+ is_expected.to be_success
end
end
end
diff --git a/spec/services/ci/runners/bulk_delete_runners_service_spec.rb b/spec/services/ci/runners/bulk_delete_runners_service_spec.rb
new file mode 100644
index 00000000000..8e9fc4e3012
--- /dev/null
+++ b/spec/services/ci/runners/bulk_delete_runners_service_spec.rb
@@ -0,0 +1,83 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ::Ci::Runners::BulkDeleteRunnersService, '#execute' do
+ subject(:execute) { described_class.new(**service_args).execute }
+
+ let(:service_args) { { runners: runners_arg } }
+ let(:runners_arg) {}
+
+ context 'with runners specified' do
+ let!(:instance_runner) { create(:ci_runner) }
+ let!(:group_runner) { create(:ci_runner, :group) }
+ let!(:project_runner) { create(:ci_runner, :project) }
+
+ shared_examples 'a service deleting runners in bulk' do
+ it 'destroys runners', :aggregate_failures do
+ expect { subject }.to change { Ci::Runner.count }.by(-2)
+
+ is_expected.to be_success
+ expect(execute.payload).to eq({ deleted_count: 2, deleted_ids: [instance_runner.id, project_runner.id] })
+ expect(instance_runner[:errors]).to be_nil
+ expect(project_runner[:errors]).to be_nil
+ expect { project_runner.runner_projects.first.reload }.to raise_error(ActiveRecord::RecordNotFound)
+ expect { group_runner.reload }.not_to raise_error
+ expect { instance_runner.reload }.to raise_error(ActiveRecord::RecordNotFound)
+ expect { project_runner.reload }.to raise_error(ActiveRecord::RecordNotFound)
+ end
+
+ context 'with some runners already deleted' do
+ before do
+ instance_runner.destroy!
+ end
+
+ let(:runners_arg) { [instance_runner.id, project_runner.id] }
+
+ it 'destroys runners and returns only deleted runners', :aggregate_failures do
+ expect { subject }.to change { Ci::Runner.count }.by(-1)
+
+ is_expected.to be_success
+ expect(execute.payload).to eq({ deleted_count: 1, deleted_ids: [project_runner.id] })
+ expect(instance_runner[:errors]).to be_nil
+ expect(project_runner[:errors]).to be_nil
+ expect { project_runner.reload }.to raise_error(ActiveRecord::RecordNotFound)
+ end
+ end
+
+ context 'with too many runners specified' do
+ before do
+ stub_const("#{described_class}::RUNNER_LIMIT", 1)
+ end
+
+ it 'deletes only the first RUNNER_LIMIT runners' do
+ expect { subject }.to change { Ci::Runner.count }.by(-1)
+
+ is_expected.to be_success
+ expect(execute.payload).to eq({ deleted_count: 1, deleted_ids: [instance_runner.id] })
+ end
+ end
+ end
+
+ context 'with runners specified as relation' do
+ let(:runners_arg) { Ci::Runner.not_group_type }
+
+ include_examples 'a service deleting runners in bulk'
+ end
+
+ context 'with runners specified as array of IDs' do
+ let(:runners_arg) { Ci::Runner.not_group_type.ids }
+
+ include_examples 'a service deleting runners in bulk'
+ end
+
+ context 'with no arguments specified' do
+ let(:runners_arg) { nil }
+
+ it 'returns 0 deleted runners' do
+ is_expected.to be_success
+ expect(execute.payload).to eq({ deleted_count: 0, deleted_ids: [] })
+ end
+ end
+ end
+end
diff --git a/spec/services/ci/runners/process_runner_version_update_service_spec.rb b/spec/services/ci/runners/process_runner_version_update_service_spec.rb
new file mode 100644
index 00000000000..b885138fc7a
--- /dev/null
+++ b/spec/services/ci/runners/process_runner_version_update_service_spec.rb
@@ -0,0 +1,80 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ci::Runners::ProcessRunnerVersionUpdateService do
+ subject(:service) { described_class.new(version) }
+
+ let(:version) { '1.0.0' }
+ let(:available_runner_releases) { %w[1.0.0 1.0.1] }
+
+ describe '#execute' do
+ subject(:execute) { service.execute }
+
+ context 'with upgrade check returning error' do
+ let(:service_double) { instance_double(Gitlab::Ci::RunnerUpgradeCheck) }
+
+ before do
+ allow(service_double).to receive(:check_runner_upgrade_suggestion).with(version)
+ .and_return([version, :error])
+ allow(service).to receive(:upgrade_check_service).and_return(service_double)
+ end
+
+ it 'does not update ci_runner_versions records', :aggregate_failures do
+ expect do
+ expect(execute).to be_error
+ expect(execute.message).to eq 'upgrade version check failed'
+ end.not_to change(Ci::RunnerVersion, :count).from(0)
+ expect(service_double).to have_received(:check_runner_upgrade_suggestion).with(version).once
+ end
+ end
+
+ context 'with successful result from upgrade check' do
+ before do
+ url = ::Gitlab::CurrentSettings.current_application_settings.public_runner_releases_url
+
+ WebMock.stub_request(:get, url).to_return(
+ body: available_runner_releases.map { |v| { name: v } }.to_json,
+ status: 200,
+ headers: { 'Content-Type' => 'application/json' }
+ )
+ end
+
+ context 'with no existing ci_runner_version record' do
+ it 'creates ci_runner_versions record', :aggregate_failures do
+ expect do
+ expect(execute).to be_success
+ expect(execute.http_status).to eq :ok
+ expect(execute.payload).to eq({ upgrade_status: 'recommended' })
+ end.to change(Ci::RunnerVersion, :all).to contain_exactly(
+ an_object_having_attributes(version: version, status: 'recommended')
+ )
+ end
+ end
+
+ context 'with existing ci_runner_version record' do
+ let!(:runner_version) { create(:ci_runner_version, version: '1.0.0', status: :not_available) }
+
+ it 'updates ci_runner_versions record', :aggregate_failures do
+ expect do
+ expect(execute).to be_success
+ expect(execute.http_status).to eq :ok
+ expect(execute.payload).to eq({ upgrade_status: 'recommended' })
+ end.to change { runner_version.reload.status }.from('not_available').to('recommended')
+ end
+ end
+
+ context 'with up-to-date ci_runner_version record' do
+ let!(:runner_version) { create(:ci_runner_version, version: '1.0.0', status: :recommended) }
+
+ it 'does not update ci_runner_versions record', :aggregate_failures do
+ expect do
+ expect(execute).to be_success
+ expect(execute.http_status).to eq :ok
+ expect(execute.payload).to eq({ upgrade_status: 'recommended' })
+ end.not_to change { runner_version.reload.status }
+ end
+ end
+ end
+ end
+end
diff --git a/spec/services/ci/runners/reconcile_existing_runner_versions_service_spec.rb b/spec/services/ci/runners/reconcile_existing_runner_versions_service_spec.rb
index f8313eaab90..1690190320a 100644
--- a/spec/services/ci/runners/reconcile_existing_runner_versions_service_spec.rb
+++ b/spec/services/ci/runners/reconcile_existing_runner_versions_service_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
RSpec.describe ::Ci::Runners::ReconcileExistingRunnerVersionsService, '#execute' do
+ include RunnerReleasesHelper
+
subject(:execute) { described_class.new.execute }
let_it_be(:runner_14_0_1) { create(:ci_runner, version: '14.0.1') }
@@ -11,12 +13,12 @@ RSpec.describe ::Ci::Runners::ReconcileExistingRunnerVersionsService, '#execute'
end
context 'with RunnerUpgradeCheck recommending 14.0.2' do
+ let(:upgrade_check) { instance_double(::Gitlab::Ci::RunnerUpgradeCheck) }
+
before do
stub_const('Ci::Runners::ReconcileExistingRunnerVersionsService::VERSION_BATCH_SIZE', 1)
- allow(::Gitlab::Ci::RunnerUpgradeCheck.instance)
- .to receive(:check_runner_upgrade_status)
- .and_return({ recommended: ::Gitlab::VersionInfo.new(14, 0, 2) })
+ allow(::Gitlab::Ci::RunnerUpgradeCheck).to receive(:new).and_return(upgrade_check).once
end
context 'with runner with new version' do
@@ -25,10 +27,11 @@ RSpec.describe ::Ci::Runners::ReconcileExistingRunnerVersionsService, '#execute'
let!(:runner_14_0_0) { create(:ci_runner, version: '14.0.0') }
before do
- allow(::Gitlab::Ci::RunnerUpgradeCheck.instance)
- .to receive(:check_runner_upgrade_status)
+ allow(upgrade_check).to receive(:check_runner_upgrade_suggestion)
+ .and_return([::Gitlab::VersionInfo.new(14, 0, 2), :recommended])
+ allow(upgrade_check).to receive(:check_runner_upgrade_suggestion)
.with('14.0.2')
- .and_return({ not_available: ::Gitlab::VersionInfo.new(14, 0, 2) })
+ .and_return([::Gitlab::VersionInfo.new(14, 0, 2), :not_available])
.once
end
@@ -39,14 +42,13 @@ RSpec.describe ::Ci::Runners::ReconcileExistingRunnerVersionsService, '#execute'
.once
.and_call_original
- result = nil
- expect { result = execute }
+ expect { execute }
.to change { runner_version_14_0_0.reload.status }.from('not_available').to('recommended')
.and change { runner_version_14_0_1.reload.status }.from('not_available').to('recommended')
.and change { ::Ci::RunnerVersion.find_by(version: '14.0.2')&.status }.from(nil).to('not_available')
- expect(result).to eq({
- status: :success,
+ expect(execute).to be_success
+ expect(execute.payload).to eq({
total_inserted: 1, # 14.0.2 is inserted
total_updated: 3, # 14.0.0, 14.0.1 are updated, and newly inserted 14.0.2's status is calculated
total_deleted: 0
@@ -58,19 +60,17 @@ RSpec.describe ::Ci::Runners::ReconcileExistingRunnerVersionsService, '#execute'
let!(:runner_version_14_0_2) { create(:ci_runner_version, version: '14.0.2', status: :not_available) }
before do
- allow(::Gitlab::Ci::RunnerUpgradeCheck.instance)
- .to receive(:check_runner_upgrade_status)
- .and_return({ not_available: ::Gitlab::VersionInfo.new(14, 0, 2) })
+ allow(upgrade_check).to receive(:check_runner_upgrade_suggestion)
+ .and_return([::Gitlab::VersionInfo.new(14, 0, 2), :not_available])
end
it 'deletes orphan ci_runner_versions entry', :aggregate_failures do
- result = nil
- expect { result = execute }
+ expect { execute }
.to change { ::Ci::RunnerVersion.find_by_version('14.0.2')&.status }.from('not_available').to(nil)
.and not_change { runner_version_14_0_1.reload.status }.from('not_available')
- expect(result).to eq({
- status: :success,
+ expect(execute).to be_success
+ expect(execute.payload).to eq({
total_inserted: 0,
total_updated: 0,
total_deleted: 1 # 14.0.2 is deleted
@@ -80,17 +80,15 @@ RSpec.describe ::Ci::Runners::ReconcileExistingRunnerVersionsService, '#execute'
context 'with no runner version changes' do
before do
- allow(::Gitlab::Ci::RunnerUpgradeCheck.instance)
- .to receive(:check_runner_upgrade_status)
- .and_return({ not_available: ::Gitlab::VersionInfo.new(14, 0, 1) })
+ allow(upgrade_check).to receive(:check_runner_upgrade_suggestion)
+ .and_return([::Gitlab::VersionInfo.new(14, 0, 1), :not_available])
end
it 'does not modify ci_runner_versions entries', :aggregate_failures do
- result = nil
- expect { result = execute }.not_to change { runner_version_14_0_1.reload.status }.from('not_available')
+ expect { execute }.not_to change { runner_version_14_0_1.reload.status }.from('not_available')
- expect(result).to eq({
- status: :success,
+ expect(execute).to be_success
+ expect(execute.payload).to eq({
total_inserted: 0,
total_updated: 0,
total_deleted: 0
@@ -100,17 +98,15 @@ RSpec.describe ::Ci::Runners::ReconcileExistingRunnerVersionsService, '#execute'
context 'with failing version check' do
before do
- allow(::Gitlab::Ci::RunnerUpgradeCheck.instance)
- .to receive(:check_runner_upgrade_status)
- .and_return({ error: ::Gitlab::VersionInfo.new(14, 0, 1) })
+ allow(upgrade_check).to receive(:check_runner_upgrade_suggestion)
+ .and_return([::Gitlab::VersionInfo.new(14, 0, 1), :error])
end
it 'makes no changes to ci_runner_versions', :aggregate_failures do
- result = nil
- expect { result = execute }.not_to change { runner_version_14_0_1.reload.status }.from('not_available')
+ expect { execute }.not_to change { runner_version_14_0_1.reload.status }.from('not_available')
- expect(result).to eq({
- status: :success,
+ expect(execute).to be_success
+ expect(execute.payload).to eq({
total_inserted: 0,
total_updated: 0,
total_deleted: 0
@@ -120,26 +116,15 @@ RSpec.describe ::Ci::Runners::ReconcileExistingRunnerVersionsService, '#execute'
end
context 'integration testing with Gitlab::Ci::RunnerUpgradeCheck' do
- let(:available_runner_releases) do
- %w[14.0.0 14.0.1]
- end
-
before do
- url = ::Gitlab::CurrentSettings.current_application_settings.public_runner_releases_url
-
- WebMock.stub_request(:get, url).to_return(
- body: available_runner_releases.map { |v| { name: v } }.to_json,
- status: 200,
- headers: { 'Content-Type' => 'application/json' }
- )
+ stub_runner_releases(%w[14.0.0 14.0.1])
end
it 'does not modify ci_runner_versions entries', :aggregate_failures do
- result = nil
- expect { result = execute }.not_to change { runner_version_14_0_1.reload.status }.from('not_available')
+ expect { execute }.not_to change { runner_version_14_0_1.reload.status }.from('not_available')
- expect(result).to eq({
- status: :success,
+ expect(execute).to be_success
+ expect(execute.payload).to eq({
total_inserted: 0,
total_updated: 0,
total_deleted: 0
diff --git a/spec/services/ci/runners/register_runner_service_spec.rb b/spec/services/ci/runners/register_runner_service_spec.rb
index 03dcf851e53..6d7b39de21e 100644
--- a/spec/services/ci/runners/register_runner_service_spec.rb
+++ b/spec/services/ci/runners/register_runner_service_spec.rb
@@ -4,8 +4,9 @@ require 'spec_helper'
RSpec.describe ::Ci::Runners::RegisterRunnerService, '#execute' do
let(:registration_token) { 'abcdefg123456' }
- let(:token) { }
+ let(:token) {}
let(:args) { {} }
+ let(:runner) { execute.payload[:runner] }
before do
stub_feature_flags(runner_registration_control: false)
@@ -13,21 +14,25 @@ RSpec.describe ::Ci::Runners::RegisterRunnerService, '#execute' do
stub_application_setting(valid_runner_registrars: ApplicationSetting::VALID_RUNNER_REGISTRAR_TYPES)
end
- subject(:runner) { described_class.new.execute(token, args) }
+ subject(:execute) { described_class.new.execute(token, args) }
context 'when no token is provided' do
let(:token) { '' }
- it 'returns nil' do
- is_expected.to be_nil
+ it 'returns error response' do
+ expect(execute).to be_error
+ expect(execute.message).to eq 'invalid token supplied'
+ expect(execute.http_status).to eq :forbidden
end
end
context 'when invalid token is provided' do
let(:token) { 'invalid' }
- it 'returns nil' do
- is_expected.to be_nil
+ it 'returns error response' do
+ expect(execute).to be_error
+ expect(execute.message).to eq 'invalid token supplied'
+ expect(execute.http_status).to eq :forbidden
end
end
@@ -36,12 +41,14 @@ RSpec.describe ::Ci::Runners::RegisterRunnerService, '#execute' do
let(:token) { registration_token }
it 'creates runner with default values' do
- is_expected.to be_an_instance_of(::Ci::Runner)
- expect(subject.persisted?).to be_truthy
- expect(subject.run_untagged).to be true
- expect(subject.active).to be true
- expect(subject.token).not_to eq(registration_token)
- expect(subject).to be_instance_type
+ expect(execute).to be_success
+
+ expect(runner).to be_an_instance_of(::Ci::Runner)
+ expect(runner.persisted?).to be_truthy
+ expect(runner.run_untagged).to be true
+ expect(runner.active).to be true
+ expect(runner.token).not_to eq(registration_token)
+ expect(runner).to be_instance_type
end
context 'with non-default arguments' do
@@ -67,25 +74,27 @@ RSpec.describe ::Ci::Runners::RegisterRunnerService, '#execute' do
end
it 'creates runner with specified values', :aggregate_failures do
- is_expected.to be_an_instance_of(::Ci::Runner)
- expect(subject.active).to eq args[:active]
- expect(subject.locked).to eq args[:locked]
- expect(subject.run_untagged).to eq args[:run_untagged]
- expect(subject.tags).to contain_exactly(
+ expect(execute).to be_success
+
+ expect(runner).to be_an_instance_of(::Ci::Runner)
+ expect(runner.active).to eq args[:active]
+ expect(runner.locked).to eq args[:locked]
+ expect(runner.run_untagged).to eq args[:run_untagged]
+ expect(runner.tags).to contain_exactly(
an_object_having_attributes(name: 'tag1'),
an_object_having_attributes(name: 'tag2')
)
- expect(subject.access_level).to eq args[:access_level]
- expect(subject.maximum_timeout).to eq args[:maximum_timeout]
- expect(subject.name).to eq args[:name]
- expect(subject.version).to eq args[:version]
- expect(subject.revision).to eq args[:revision]
- expect(subject.platform).to eq args[:platform]
- expect(subject.architecture).to eq args[:architecture]
- expect(subject.ip_address).to eq args[:ip_address]
-
- expect(Ci::Runner.tagged_with('tag1')).to include(subject)
- expect(Ci::Runner.tagged_with('tag2')).to include(subject)
+ expect(runner.access_level).to eq args[:access_level]
+ expect(runner.maximum_timeout).to eq args[:maximum_timeout]
+ expect(runner.name).to eq args[:name]
+ expect(runner.version).to eq args[:version]
+ expect(runner.revision).to eq args[:revision]
+ expect(runner.platform).to eq args[:platform]
+ expect(runner.architecture).to eq args[:architecture]
+ expect(runner.ip_address).to eq args[:ip_address]
+
+ expect(Ci::Runner.tagged_with('tag1')).to include(runner)
+ expect(Ci::Runner.tagged_with('tag2')).to include(runner)
end
end
@@ -95,8 +104,10 @@ RSpec.describe ::Ci::Runners::RegisterRunnerService, '#execute' do
end
it 'creates runner with token expiration' do
- is_expected.to be_an_instance_of(::Ci::Runner)
- expect(subject.token_expires_at).to eq(5.days.from_now)
+ expect(execute).to be_success
+
+ expect(runner).to be_an_instance_of(::Ci::Runner)
+ expect(runner.token_expires_at).to eq(5.days.from_now)
end
end
end
@@ -106,12 +117,14 @@ RSpec.describe ::Ci::Runners::RegisterRunnerService, '#execute' do
let(:token) { project.runners_token }
it 'creates project runner' do
- is_expected.to be_an_instance_of(::Ci::Runner)
+ expect(execute).to be_success
+
+ expect(runner).to be_an_instance_of(::Ci::Runner)
expect(project.runners.size).to eq(1)
- is_expected.to eq(project.runners.first)
- expect(subject.token).not_to eq(registration_token)
- expect(subject.token).not_to eq(project.runners_token)
- expect(subject).to be_project_type
+ expect(runner).to eq(project.runners.first)
+ expect(runner.token).not_to eq(registration_token)
+ expect(runner.token).not_to eq(project.runners_token)
+ expect(runner).to be_project_type
end
context 'when it exceeds the application limits' do
@@ -121,9 +134,13 @@ RSpec.describe ::Ci::Runners::RegisterRunnerService, '#execute' do
end
it 'does not create runner' do
- is_expected.to be_an_instance_of(::Ci::Runner)
- expect(subject.persisted?).to be_falsey
- expect(subject.errors.messages).to eq('runner_projects.base': ['Maximum number of ci registered project runners (1) exceeded'])
+ expect(execute).to be_success
+
+ expect(runner).to be_an_instance_of(::Ci::Runner)
+ expect(runner.persisted?).to be_falsey
+ expect(runner.errors.messages).to eq(
+ 'runner_projects.base': ['Maximum number of ci registered project runners (1) exceeded']
+ )
expect(project.runners.reload.size).to eq(1)
end
end
@@ -135,8 +152,10 @@ RSpec.describe ::Ci::Runners::RegisterRunnerService, '#execute' do
end
it 'creates runner' do
- is_expected.to be_an_instance_of(::Ci::Runner)
- expect(subject.errors).to be_empty
+ expect(execute).to be_success
+
+ expect(runner).to be_an_instance_of(::Ci::Runner)
+ expect(runner.errors).to be_empty
expect(project.runners.reload.size).to eq(2)
expect(project.runners.recent.size).to eq(1)
end
@@ -153,15 +172,18 @@ RSpec.describe ::Ci::Runners::RegisterRunnerService, '#execute' do
end
it 'returns 403 error' do
- is_expected.to be_nil
+ expect(execute).to be_error
+ expect(execute.http_status).to eq :forbidden
end
end
context 'when feature flag is disabled' do
it 'registers the runner' do
- is_expected.to be_an_instance_of(::Ci::Runner)
- expect(subject.errors).to be_empty
- expect(subject.active).to be true
+ expect(execute).to be_success
+
+ expect(runner).to be_an_instance_of(::Ci::Runner)
+ expect(runner.errors).to be_empty
+ expect(runner.active).to be true
end
end
end
@@ -172,12 +194,14 @@ RSpec.describe ::Ci::Runners::RegisterRunnerService, '#execute' do
let(:token) { group.runners_token }
it 'creates a group runner' do
- is_expected.to be_an_instance_of(::Ci::Runner)
- expect(subject.errors).to be_empty
+ expect(execute).to be_success
+
+ expect(runner).to be_an_instance_of(::Ci::Runner)
+ expect(runner.errors).to be_empty
expect(group.runners.reload.size).to eq(1)
- expect(subject.token).not_to eq(registration_token)
- expect(subject.token).not_to eq(group.runners_token)
- expect(subject).to be_group_type
+ expect(runner.token).not_to eq(registration_token)
+ expect(runner.token).not_to eq(group.runners_token)
+ expect(runner).to be_group_type
end
context 'when it exceeds the application limits' do
@@ -187,9 +211,13 @@ RSpec.describe ::Ci::Runners::RegisterRunnerService, '#execute' do
end
it 'does not create runner' do
- is_expected.to be_an_instance_of(::Ci::Runner)
- expect(subject.persisted?).to be_falsey
- expect(subject.errors.messages).to eq('runner_namespaces.base': ['Maximum number of ci registered group runners (1) exceeded'])
+ expect(execute).to be_success
+
+ expect(runner).to be_an_instance_of(::Ci::Runner)
+ expect(runner.persisted?).to be_falsey
+ expect(runner.errors.messages).to eq(
+ 'runner_namespaces.base': ['Maximum number of ci registered group runners (1) exceeded']
+ )
expect(group.runners.reload.size).to eq(1)
end
end
@@ -202,8 +230,10 @@ RSpec.describe ::Ci::Runners::RegisterRunnerService, '#execute' do
end
it 'creates runner' do
- is_expected.to be_an_instance_of(::Ci::Runner)
- expect(subject.errors).to be_empty
+ expect(execute).to be_success
+
+ expect(runner).to be_an_instance_of(::Ci::Runner)
+ expect(runner.errors).to be_empty
expect(group.runners.reload.size).to eq(3)
expect(group.runners.recent.size).to eq(1)
end
@@ -219,16 +249,18 @@ RSpec.describe ::Ci::Runners::RegisterRunnerService, '#execute' do
stub_feature_flags(runner_registration_control: true)
end
- it 'returns nil' do
- is_expected.to be_nil
+ it 'returns error response' do
+ is_expected.to be_error
end
end
context 'when feature flag is disabled' do
it 'registers the runner' do
- is_expected.to be_an_instance_of(::Ci::Runner)
- expect(subject.errors).to be_empty
- expect(subject.active).to be true
+ expect(execute).to be_success
+
+ expect(runner).to be_an_instance_of(::Ci::Runner)
+ expect(runner.errors).to be_empty
+ expect(runner.active).to be true
end
end
end
diff --git a/spec/services/ci/runners/reset_registration_token_service_spec.rb b/spec/services/ci/runners/reset_registration_token_service_spec.rb
index c4bfff51cc8..79059712032 100644
--- a/spec/services/ci/runners/reset_registration_token_service_spec.rb
+++ b/spec/services/ci/runners/reset_registration_token_service_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe ::Ci::Runners::ResetRegistrationTokenService, '#execute' do
- subject { described_class.new(scope, current_user).execute }
+ subject(:execute) { described_class.new(scope, current_user).execute }
let_it_be(:user) { build(:user) }
let_it_be(:admin_user) { create(:user, :admin) }
@@ -12,20 +12,20 @@ RSpec.describe ::Ci::Runners::ResetRegistrationTokenService, '#execute' do
context 'without user' do
let(:current_user) { nil }
- it 'does not reset registration token and returns nil' do
+ it 'does not reset registration token and returns error response' do
expect(scope).not_to receive(token_reset_method_name)
- is_expected.to be_nil
+ expect(execute).to be_error
end
end
context 'with unauthorized user' do
let(:current_user) { user }
- it 'does not reset registration token and returns nil' do
+ it 'does not reset registration token and returns error response' do
expect(scope).not_to receive(token_reset_method_name)
- is_expected.to be_nil
+ expect(execute).to be_error
end
end
@@ -37,7 +37,8 @@ RSpec.describe ::Ci::Runners::ResetRegistrationTokenService, '#execute' do
expect(scope).to receive(token_method_name).once.and_return("#{token_method_name} return value")
end
- is_expected.to eq("#{token_method_name} return value")
+ expect(execute).to be_success
+ expect(execute.payload[:new_registration_token]).to eq("#{token_method_name} return value")
end
end
end
diff --git a/spec/services/ci/runners/unassign_runner_service_spec.rb b/spec/services/ci/runners/unassign_runner_service_spec.rb
index 3fb6925f4bd..cf710cf6893 100644
--- a/spec/services/ci/runners/unassign_runner_service_spec.rb
+++ b/spec/services/ci/runners/unassign_runner_service_spec.rb
@@ -3,21 +3,21 @@
require 'spec_helper'
RSpec.describe ::Ci::Runners::UnassignRunnerService, '#execute' do
- subject(:service) { described_class.new(runner_project, user).execute }
-
- let_it_be(:runner) { create(:ci_runner, :project, projects: [project]) }
let_it_be(:project) { create(:project) }
+ let_it_be(:runner) { create(:ci_runner, :project, projects: [project]) }
let(:runner_project) { runner.runner_projects.last }
+ subject(:execute) { described_class.new(runner_project, user).execute }
+
context 'without user' do
let(:user) { nil }
it 'does not destroy runner_project', :aggregate_failures do
expect(runner_project).not_to receive(:destroy)
- expect { service }.not_to change { runner.runner_projects.count }.from(1)
+ expect { execute }.not_to change { runner.runner_projects.count }.from(1)
- is_expected.to eq(false)
+ is_expected.to be_error
end
end
@@ -27,17 +27,27 @@ RSpec.describe ::Ci::Runners::UnassignRunnerService, '#execute' do
it 'does not call destroy on runner_project' do
expect(runner).not_to receive(:destroy)
- service
+ is_expected.to be_error
end
end
context 'with admin user', :enable_admin_mode do
let(:user) { create_default(:user, :admin) }
- it 'destroys runner_project' do
- expect(runner_project).to receive(:destroy).once
+ context 'with destroy returning false' do
+ it 'returns error response' do
+ expect(runner_project).to receive(:destroy).once.and_return(false)
+
+ is_expected.to be_error
+ end
+ end
+
+ context 'with destroy returning true' do
+ it 'returns success response' do
+ expect(runner_project).to receive(:destroy).once.and_return(true)
- service
+ is_expected.to be_success
+ end
end
end
end
diff --git a/spec/services/ci/runners/unregister_runner_service_spec.rb b/spec/services/ci/runners/unregister_runner_service_spec.rb
index df1a0a90067..77fc299e4e1 100644
--- a/spec/services/ci/runners/unregister_runner_service_spec.rb
+++ b/spec/services/ci/runners/unregister_runner_service_spec.rb
@@ -3,13 +3,16 @@
require 'spec_helper'
RSpec.describe ::Ci::Runners::UnregisterRunnerService, '#execute' do
- subject { described_class.new(runner, 'some_token').execute }
+ subject(:execute) { described_class.new(runner, 'some_token').execute }
let(:runner) { create(:ci_runner) }
it 'destroys runner' do
expect(runner).to receive(:destroy).once.and_call_original
- expect { subject }.to change { Ci::Runner.count }.by(-1)
+
+ expect do
+ expect(execute).to be_success
+ end.to change { Ci::Runner.count }.by(-1)
expect(runner[:errors]).to be_nil
end
end
diff --git a/spec/services/ci/runners/update_runner_service_spec.rb b/spec/services/ci/runners/update_runner_service_spec.rb
index b02ea8f58b0..e008fde9982 100644
--- a/spec/services/ci/runners/update_runner_service_spec.rb
+++ b/spec/services/ci/runners/update_runner_service_spec.rb
@@ -38,7 +38,7 @@ RSpec.describe Ci::Runners::UpdateRunnerService do
end
context 'with cost factor params' do
- let(:params) { { public_projects_minutes_cost_factor: 1.1, private_projects_minutes_cost_factor: 2.2 }}
+ let(:params) { { public_projects_minutes_cost_factor: 1.1, private_projects_minutes_cost_factor: 2.2 } }
it 'updates the runner cost factors' do
expect(update).to be_truthy
diff --git a/spec/services/ci/stuck_builds/drop_pending_service_spec.rb b/spec/services/ci/stuck_builds/drop_pending_service_spec.rb
index ebc57af77a0..a452a65829a 100644
--- a/spec/services/ci/stuck_builds/drop_pending_service_spec.rb
+++ b/spec/services/ci/stuck_builds/drop_pending_service_spec.rb
@@ -9,8 +9,8 @@ RSpec.describe Ci::StuckBuilds::DropPendingService do
create(:ci_build, pipeline: pipeline, runner: runner)
end
- let(:created_at) { }
- let(:updated_at) { }
+ let(:created_at) {}
+ let(:updated_at) {}
subject(:service) { described_class.new }
diff --git a/spec/services/ci/stuck_builds/drop_scheduled_service_spec.rb b/spec/services/ci/stuck_builds/drop_scheduled_service_spec.rb
index 1416fab3d25..a4f9f97fffc 100644
--- a/spec/services/ci/stuck_builds/drop_scheduled_service_spec.rb
+++ b/spec/services/ci/stuck_builds/drop_scheduled_service_spec.rb
@@ -44,7 +44,7 @@ RSpec.describe Ci::StuckBuilds::DropScheduledService do
end
context 'when there are no stale scheduled builds' do
- let(:job) { }
+ let(:job) {}
it 'does not drop the stale scheduled build yet' do
expect { service.execute }.not_to raise_error
diff --git a/spec/services/ci/track_failed_build_service_spec.rb b/spec/services/ci/track_failed_build_service_spec.rb
new file mode 100644
index 00000000000..d83e56f0669
--- /dev/null
+++ b/spec/services/ci/track_failed_build_service_spec.rb
@@ -0,0 +1,56 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ci::TrackFailedBuildService do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project, :public) }
+ let_it_be(:pipeline) { create(:ci_pipeline, project: project, user: user) }
+
+ let_it_be(:exit_code) { 42 }
+ let_it_be(:failure_reason) { "script_failure" }
+
+ describe '#execute' do
+ context 'when a build has failed' do
+ let_it_be(:build) { create(:ci_build, :failed, :sast_report, pipeline: pipeline, user: user) }
+
+ subject { described_class.new(build: build, exit_code: exit_code, failure_reason: failure_reason) }
+
+ it 'tracks the build failed event', :snowplow do
+ response = subject.execute
+
+ expect(response.success?).to be true
+
+ expect_snowplow_event(
+ category: 'ci::build',
+ action: 'failed',
+ context: [{
+ schema: described_class::SCHEMA_URL,
+ data: {
+ build_id: build.id,
+ build_name: build.name,
+ build_artifact_types: ["sast"],
+ exit_code: exit_code,
+ failure_reason: failure_reason
+ }
+ }],
+ user: user,
+ project: project.id)
+ end
+ end
+
+ context 'when a build has not failed' do
+ let_it_be(:build) { create(:ci_build, :success, :sast_report, pipeline: pipeline, user: user) }
+
+ subject { described_class.new(build: build, exit_code: nil, failure_reason: nil) }
+
+ it 'does not track the build failed event', :snowplow do
+ response = subject.execute
+
+ expect(response.error?).to be true
+
+ expect_no_snowplow_event
+ end
+ end
+ end
+end
diff --git a/spec/services/ci/update_build_state_service_spec.rb b/spec/services/ci/update_build_state_service_spec.rb
index 937b19beff5..90a86e7ae59 100644
--- a/spec/services/ci/update_build_state_service_spec.rb
+++ b/spec/services/ci/update_build_state_service_spec.rb
@@ -33,6 +33,24 @@ RSpec.describe Ci::UpdateBuildStateService do
end
end
+ context 'when build has failed' do
+ let(:params) do
+ {
+ output: { checksum: 'crc32:12345678', bytesize: 123 },
+ state: 'failed',
+ failure_reason: 'script_failure',
+ exit_code: 7
+ }
+ end
+
+ it 'sends a build failed event to Snowplow' do
+ expect(::Ci::TrackFailedBuildWorker)
+ .to receive(:perform_async).with(build.id, params[:exit_code], params[:failure_reason])
+
+ subject.execute
+ end
+ end
+
context 'when build does not have checksum' do
context 'when state has changed' do
let(:params) { { state: 'success' } }
diff --git a/spec/services/clusters/integrations/create_service_spec.rb b/spec/services/clusters/integrations/create_service_spec.rb
index 016511a3c01..9104e07504d 100644
--- a/spec/services/clusters/integrations/create_service_spec.rb
+++ b/spec/services/clusters/integrations/create_service_spec.rb
@@ -68,7 +68,7 @@ RSpec.describe Clusters::Integrations::CreateService, '#execute' do
end
it 'errors' do
- expect { service.execute}.to raise_error(ArgumentError)
+ expect { service.execute }.to raise_error(ArgumentError)
end
end
diff --git a/spec/services/clusters/integrations/prometheus_health_check_service_spec.rb b/spec/services/clusters/integrations/prometheus_health_check_service_spec.rb
index 7147f1b9b28..526462931a6 100644
--- a/spec/services/clusters/integrations/prometheus_health_check_service_spec.rb
+++ b/spec/services/clusters/integrations/prometheus_health_check_service_spec.rb
@@ -51,6 +51,7 @@ RSpec.describe Clusters::Integrations::PrometheusHealthCheckService, '#execute'
let(:prometheus_enabled) { false }
it { expect(subject).to eq(nil) }
+
include_examples 'no alert'
end
diff --git a/spec/services/clusters/kubernetes/create_or_update_service_account_service_spec.rb b/spec/services/clusters/kubernetes/create_or_update_service_account_service_spec.rb
index a4f018aec0c..064f9e42e96 100644
--- a/spec/services/clusters/kubernetes/create_or_update_service_account_service_spec.rb
+++ b/spec/services/clusters/kubernetes/create_or_update_service_account_service_spec.rb
@@ -136,7 +136,7 @@ RSpec.describe Clusters::Kubernetes::CreateOrUpdateServiceAccountService do
context 'With RBAC enabled cluster' do
let(:rbac) { true }
- let(:role_binding_name) { "gitlab-#{namespace}"}
+ let(:role_binding_name) { "gitlab-#{namespace}" }
before do
cluster.platform_kubernetes.rbac!
diff --git a/spec/services/database/consistency_check_service_spec.rb b/spec/services/database/consistency_check_service_spec.rb
index 2e642451432..6695e4b5e9f 100644
--- a/spec/services/database/consistency_check_service_spec.rb
+++ b/spec/services/database/consistency_check_service_spec.rb
@@ -24,9 +24,27 @@ RSpec.describe Database::ConsistencyCheckService do
)
end
- describe '#random_start_id' do
- let(:batch_size) { 5 }
+ describe '#min_id' do
+ before do
+ create_list(:namespace, 3)
+ end
+ it 'returns the id of the first record in the database' do
+ expect(subject.send(:min_id)).to eq(Namespace.first.id)
+ end
+ end
+
+ describe '#max_id' do
+ before do
+ create_list(:namespace, 3)
+ end
+
+ it 'returns the id of the last record in the database' do
+ expect(subject.send(:max_id)).to eq(Namespace.last.id)
+ end
+ end
+
+ describe '#random_start_id' do
before do
create_list(:namespace, 50) # This will also create Ci::NameSpaceMirror objects
end
@@ -58,12 +76,11 @@ RSpec.describe Database::ConsistencyCheckService do
end
context 'no cursor has been saved before' do
- let(:selected_start_id) { Namespace.order(:id).limit(5).pluck(:id).last }
- let(:expected_next_start_id) { selected_start_id + batch_size * max_batches }
+ let(:min_id) { Namespace.first.id }
+ let(:max_id) { Namespace.last.id }
before do
create_list(:namespace, 50) # This will also create Ci::NameSpaceMirror objects
- expect(consistency_check_service).to receive(:random_start_id).and_return(selected_start_id)
end
it 'picks a random start_id' do
@@ -72,17 +89,21 @@ RSpec.describe Database::ConsistencyCheckService do
matches: 10,
mismatches: 0,
mismatches_details: [],
- start_id: selected_start_id,
- next_start_id: expected_next_start_id
+ start_id: be_between(min_id, max_id),
+ next_start_id: be_between(min_id, max_id)
}
- expect(consistency_check_service.execute).to eq(expected_result)
+ expect(consistency_check_service).to receive(:rand).with(min_id..max_id).and_call_original
+ result = consistency_check_service.execute
+ expect(result).to match(expected_result)
end
it 'calls the ConsistencyCheckService with the expected parameters' do
+ expect(consistency_check_service).to receive(:random_start_id).and_return(min_id)
+
allow_next_instance_of(Gitlab::Database::ConsistencyChecker) do |instance|
- expect(instance).to receive(:execute).with(start_id: selected_start_id).and_return({
+ expect(instance).to receive(:execute).with(start_id: min_id).and_return({
batches: 2,
- next_start_id: expected_next_start_id,
+ next_start_id: min_id + batch_size,
matches: 10,
mismatches: 0,
mismatches_details: []
@@ -98,17 +119,19 @@ RSpec.describe Database::ConsistencyCheckService do
expected_result = {
batches: 2,
- start_id: selected_start_id,
- next_start_id: expected_next_start_id,
matches: 10,
mismatches: 0,
- mismatches_details: []
+ mismatches_details: [],
+ start_id: be_between(min_id, max_id),
+ next_start_id: be_between(min_id, max_id)
}
- expect(consistency_check_service.execute).to eq(expected_result)
+ result = consistency_check_service.execute
+ expect(result).to match(expected_result)
end
it 'saves the next_start_id in Redis for the next iteration' do
- expect(consistency_check_service).to receive(:save_next_start_id).with(expected_next_start_id).and_call_original
+ expect(consistency_check_service).to receive(:save_next_start_id)
+ .with(be_between(min_id, max_id)).and_call_original
consistency_check_service.execute
end
end
diff --git a/spec/services/deployments/create_for_build_service_spec.rb b/spec/services/deployments/create_for_build_service_spec.rb
index 38d94580512..a2e1acadcc1 100644
--- a/spec/services/deployments/create_for_build_service_spec.rb
+++ b/spec/services/deployments/create_for_build_service_spec.rb
@@ -41,7 +41,7 @@ RSpec.describe Deployments::CreateForBuildService do
end
context 'when the corresponding environment does not exist' do
- let!(:environment) { }
+ let!(:environment) {}
it 'does not create a deployment record' do
expect { subject }.not_to change { Deployment.count }
diff --git a/spec/services/deployments/update_environment_service_spec.rb b/spec/services/deployments/update_environment_service_spec.rb
index 8ab53a37a33..4485ce585bb 100644
--- a/spec/services/deployments/update_environment_service_spec.rb
+++ b/spec/services/deployments/update_environment_service_spec.rb
@@ -112,7 +112,7 @@ RSpec.describe Deployments::UpdateEnvironmentService do
end
context 'when external URL is invalid' do
- let(:external_url) { 'google.com' }
+ let(:external_url) { 'javascript:alert("hello")' }
it 'fails to update the tier due to validation error' do
expect { subject.execute }.not_to change { environment.tier }
@@ -123,7 +123,7 @@ RSpec.describe Deployments::UpdateEnvironmentService do
.with(an_instance_of(described_class::EnvironmentUpdateFailure),
project_id: project.id,
environment_id: environment.id,
- reason: %q{External url is blocked: Only allowed schemes are http, https})
+ reason: %q{External url javascript scheme is not allowed})
.once
subject.execute
@@ -307,14 +307,6 @@ RSpec.describe Deployments::UpdateEnvironmentService do
end
it { is_expected.to eq('http://appname-master.example.com') }
-
- context 'when the FF ci_expand_environment_name_and_url is disabled' do
- before do
- stub_feature_flags(ci_expand_environment_name_and_url: false)
- end
-
- it { is_expected.to eq('http://${STACK_NAME}.example.com') }
- end
end
context 'when yaml environment does not have url' do
diff --git a/spec/services/design_management/delete_designs_service_spec.rb b/spec/services/design_management/delete_designs_service_spec.rb
index bc7625d7c28..a0e049ea42a 100644
--- a/spec/services/design_management/delete_designs_service_spec.rb
+++ b/spec/services/design_management/delete_designs_service_spec.rb
@@ -59,7 +59,11 @@ RSpec.describe DesignManagement::DeleteDesignsService do
it_behaves_like "a service error"
it 'does not create any events in the activity stream' do
- expect { run_service rescue nil }.not_to change { Event.count }
+ expect do
+ run_service
+ rescue StandardError
+ nil
+ end.not_to change { Event.count }
end
end
@@ -78,7 +82,11 @@ RSpec.describe DesignManagement::DeleteDesignsService do
it 'does not log any events' do
counter = ::Gitlab::UsageDataCounters::DesignsCounter
- expect { run_service rescue nil }
+ expect do
+ run_service
+ rescue StandardError
+ nil
+ end
.not_to change { [counter.totals, Event.count] }
end
@@ -86,10 +94,18 @@ RSpec.describe DesignManagement::DeleteDesignsService do
redis_hll = ::Gitlab::UsageDataCounters::HLLRedisCounter
event = Gitlab::UsageDataCounters::IssueActivityUniqueCounter::ISSUE_DESIGNS_REMOVED
- expect { run_service rescue nil }
+ expect do
+ run_service
+ rescue StandardError
+ nil
+ end
.not_to change { redis_hll.unique_events(event_names: event, start_date: 1.day.ago, end_date: 1.day.from_now) }
- run_service rescue nil
+ begin
+ run_service
+ rescue StandardError
+ nil
+ end
end
end
diff --git a/spec/services/design_management/generate_image_versions_service_spec.rb b/spec/services/design_management/generate_image_versions_service_spec.rb
index e06b6fbf116..5409ec12016 100644
--- a/spec/services/design_management/generate_image_versions_service_spec.rb
+++ b/spec/services/design_management/generate_image_versions_service_spec.rb
@@ -16,7 +16,7 @@ RSpec.describe DesignManagement::GenerateImageVersionsService do
end
it 'skips generating image versions if the mime type is not whitelisted' do
- stub_const('DesignManagement::DesignV432x230Uploader::MIME_TYPE_WHITELIST', [])
+ stub_const('DesignManagement::DesignV432x230Uploader::MIME_TYPE_ALLOWLIST', [])
described_class.new(version).execute
diff --git a/spec/services/git/branch_push_service_spec.rb b/spec/services/git/branch_push_service_spec.rb
index 8d41b20c8a9..6280f1263c3 100644
--- a/spec/services/git/branch_push_service_spec.rb
+++ b/spec/services/git/branch_push_service_spec.rb
@@ -6,7 +6,7 @@ RSpec.describe Git::BranchPushService, services: true do
include RepoHelpers
let_it_be(:user) { create(:user) }
- let_it_be(:project, reload: true) { create(:project, :repository) }
+ let_it_be_with_refind(:project) { create(:project, :repository) }
let(:blankrev) { Gitlab::Git::BLANK_SHA }
let(:oldrev) { sample_commit.parent_id }
@@ -573,7 +573,7 @@ RSpec.describe Git::BranchPushService, services: true do
before do
allow(project).to receive(:default_branch).and_return('feature')
- expect(project).to receive(:change_head) { 'feature'}
+ expect(project).to receive(:change_head) { 'feature' }
end
it 'push to first branch updates HEAD' do
diff --git a/spec/services/google_cloud/create_cloudsql_instance_service_spec.rb b/spec/services/google_cloud/create_cloudsql_instance_service_spec.rb
new file mode 100644
index 00000000000..cd0dd75e576
--- /dev/null
+++ b/spec/services/google_cloud/create_cloudsql_instance_service_spec.rb
@@ -0,0 +1,90 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe GoogleCloud::CreateCloudsqlInstanceService do
+ let(:project) { create(:project) }
+ let(:user) { create(:user) }
+ let(:gcp_project_id) { 'gcp_project_120' }
+ let(:environment_name) { 'test_env_42' }
+ let(:database_version) { 'POSTGRES_8000' }
+ let(:tier) { 'REIT_TIER' }
+ let(:service) do
+ described_class.new(project, user, {
+ gcp_project_id: gcp_project_id,
+ environment_name: environment_name,
+ database_version: database_version,
+ tier: tier
+ })
+ end
+
+ describe '#execute' do
+ before do
+ allow_next_instance_of(::Ci::VariablesFinder) do |variable_finder|
+ allow(variable_finder).to receive(:execute).and_return([])
+ end
+ end
+
+ it 'triggers creation of a cloudsql instance' do
+ expect_next_instance_of(GoogleApi::CloudPlatform::Client) do |client|
+ expected_instance_name = "gitlab-#{project.id}-postgres-8000-test-env-42"
+ expect(client).to receive(:create_cloudsql_instance)
+ .with(gcp_project_id,
+ expected_instance_name,
+ String,
+ database_version,
+ 'us-east1',
+ tier)
+ end
+
+ result = service.execute
+ expect(result[:status]).to be(:success)
+ end
+
+ it 'triggers worker to manage cloudsql instance creation operation results' do
+ expect_next_instance_of(GoogleApi::CloudPlatform::Client) do |client|
+ expect(client).to receive(:create_cloudsql_instance)
+ end
+
+ expect(GoogleCloud::CreateCloudsqlInstanceWorker).to receive(:perform_in)
+
+ result = service.execute
+ expect(result[:status]).to be(:success)
+ end
+
+ context 'when google APIs fail' do
+ it 'returns error' do
+ expect_next_instance_of(GoogleApi::CloudPlatform::Client) do |client|
+ expect(client).to receive(:create_cloudsql_instance).and_raise(Google::Apis::Error.new('mock-error'))
+ end
+
+ result = service.execute
+ expect(result[:status]).to eq(:error)
+ end
+ end
+
+ context 'when project has GCP_REGION defined' do
+ let(:gcp_region) { instance_double(::Ci::Variable, key: 'GCP_REGION', value: 'user-defined-region') }
+
+ before do
+ allow_next_instance_of(::Ci::VariablesFinder) do |variable_finder|
+ allow(variable_finder).to receive(:execute).and_return([gcp_region])
+ end
+ end
+
+ it 'uses defined region' do
+ expect_next_instance_of(GoogleApi::CloudPlatform::Client) do |client|
+ expect(client).to receive(:create_cloudsql_instance)
+ .with(gcp_project_id,
+ String,
+ String,
+ database_version,
+ 'user-defined-region',
+ tier)
+ end
+
+ service.execute
+ end
+ end
+ end
+end
diff --git a/spec/services/google_cloud/enable_cloudsql_service_spec.rb b/spec/services/google_cloud/enable_cloudsql_service_spec.rb
new file mode 100644
index 00000000000..e54e5a8d446
--- /dev/null
+++ b/spec/services/google_cloud/enable_cloudsql_service_spec.rb
@@ -0,0 +1,39 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe GoogleCloud::EnableCloudsqlService do
+ let_it_be(:project) { create(:project) }
+
+ subject(:result) { described_class.new(project).execute }
+
+ context 'when a project does not have any GCP_PROJECT_IDs configured' do
+ it 'returns error' do
+ message = 'No GCP projects found. Configure a service account or GCP_PROJECT_ID CI variable.'
+
+ expect(result[:status]).to eq(:error)
+ expect(result[:message]).to eq(message)
+ end
+ end
+
+ context 'when a project has GCP_PROJECT_IDs configured' do
+ before do
+ project.variables.build(environment_scope: 'production', key: 'GCP_PROJECT_ID', value: 'prj-prod')
+ project.variables.build(environment_scope: 'staging', key: 'GCP_PROJECT_ID', value: 'prj-staging')
+ project.save!
+ end
+
+ it 'enables cloudsql, compute and service networking Google APIs', :aggregate_failures do
+ expect_next_instance_of(GoogleApi::CloudPlatform::Client) do |instance|
+ expect(instance).to receive(:enable_cloud_sql_admin).with('prj-prod')
+ expect(instance).to receive(:enable_compute).with('prj-prod')
+ expect(instance).to receive(:enable_service_networking).with('prj-prod')
+ expect(instance).to receive(:enable_cloud_sql_admin).with('prj-staging')
+ expect(instance).to receive(:enable_compute).with('prj-staging')
+ expect(instance).to receive(:enable_service_networking).with('prj-staging')
+ end
+
+ expect(result[:status]).to eq(:success)
+ end
+ end
+end
diff --git a/spec/services/google_cloud/get_cloudsql_instances_service_spec.rb b/spec/services/google_cloud/get_cloudsql_instances_service_spec.rb
new file mode 100644
index 00000000000..4587a5077c0
--- /dev/null
+++ b/spec/services/google_cloud/get_cloudsql_instances_service_spec.rb
@@ -0,0 +1,62 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe GoogleCloud::GetCloudsqlInstancesService do
+ let(:service) { described_class.new(project) }
+ let(:project) { create(:project) }
+
+ context 'when project has no registered cloud sql instances' do
+ it 'returns an empty result' do
+ expect(service.execute.length).to eq(0)
+ end
+ end
+
+ context 'when project has registered cloud sql instance' do
+ before do
+ keys = %w[
+ GCP_PROJECT_ID
+ GCP_CLOUDSQL_INSTANCE_NAME
+ GCP_CLOUDSQL_CONNECTION_NAME
+ GCP_CLOUDSQL_PRIMARY_IP_ADDRESS
+ GCP_CLOUDSQL_VERSION
+ GCP_CLOUDSQL_DATABASE_NAME
+ GCP_CLOUDSQL_DATABASE_USER
+ GCP_CLOUDSQL_DATABASE_PASS
+ ]
+
+ envs = %w[
+ *
+ STG
+ PRD
+ ]
+
+ keys.each do |key|
+ envs.each do |env|
+ project.variables.build(protected: false, environment_scope: env, key: key, value: "value-#{key}-#{env}")
+ end
+ end
+ end
+
+ it 'returns results grouped by environment', :aggregate_failures do
+ expect(service.execute).to contain_exactly({
+ ref: '*',
+ gcp_project: 'value-GCP_PROJECT_ID-*',
+ instance_name: 'value-GCP_CLOUDSQL_INSTANCE_NAME-*',
+ version: 'value-GCP_CLOUDSQL_VERSION-*'
+ },
+ {
+ ref: 'STG',
+ gcp_project: 'value-GCP_PROJECT_ID-STG',
+ instance_name: 'value-GCP_CLOUDSQL_INSTANCE_NAME-STG',
+ version: 'value-GCP_CLOUDSQL_VERSION-STG'
+ },
+ {
+ ref: 'PRD',
+ gcp_project: 'value-GCP_PROJECT_ID-PRD',
+ instance_name: 'value-GCP_CLOUDSQL_INSTANCE_NAME-PRD',
+ version: 'value-GCP_CLOUDSQL_VERSION-PRD'
+ })
+ end
+ end
+end
diff --git a/spec/services/google_cloud/setup_cloudsql_instance_service_spec.rb b/spec/services/google_cloud/setup_cloudsql_instance_service_spec.rb
index 55553097423..e0a622bfa4a 100644
--- a/spec/services/google_cloud/setup_cloudsql_instance_service_spec.rb
+++ b/spec/services/google_cloud/setup_cloudsql_instance_service_spec.rb
@@ -5,6 +5,21 @@ require 'spec_helper'
RSpec.describe GoogleCloud::SetupCloudsqlInstanceService do
let(:random_user) { create(:user) }
let(:project) { create(:project) }
+ let(:list_databases_empty) { Google::Apis::SqladminV1beta4::ListDatabasesResponse.new(items: []) }
+ let(:list_users_empty) { Google::Apis::SqladminV1beta4::ListUsersResponse.new(items: []) }
+ let(:list_databases) do
+ Google::Apis::SqladminV1beta4::ListDatabasesResponse.new(items: [
+ Google::Apis::SqladminV1beta4::Database.new(name: 'postgres'),
+ Google::Apis::SqladminV1beta4::Database.new(name: 'main_db')
+ ])
+ end
+
+ let(:list_users) do
+ Google::Apis::SqladminV1beta4::ListUsersResponse.new(items: [
+ Google::Apis::SqladminV1beta4::User.new(name: 'postgres'),
+ Google::Apis::SqladminV1beta4::User.new(name: 'main_user')
+ ])
+ end
context 'when unauthorized user triggers worker' do
subject do
@@ -76,6 +91,8 @@ RSpec.describe GoogleCloud::SetupCloudsqlInstanceService do
allow_next_instance_of(GoogleApi::CloudPlatform::Client) do |google_api_client|
expect(google_api_client).to receive(:get_cloudsql_instance).and_return(get_instance_response_runnable)
expect(google_api_client).to receive(:create_cloudsql_database).and_return(operation_fail)
+ expect(google_api_client).to receive(:list_cloudsql_databases).and_return(list_databases_empty)
+ expect(google_api_client).to receive(:list_cloudsql_users).and_return(list_users_empty)
end
message = subject[:message]
@@ -92,6 +109,8 @@ RSpec.describe GoogleCloud::SetupCloudsqlInstanceService do
expect(google_api_client).to receive(:get_cloudsql_instance).and_return(get_instance_response_runnable)
expect(google_api_client).to receive(:create_cloudsql_database).and_return(operation_done)
expect(google_api_client).to receive(:create_cloudsql_user).and_return(operation_fail)
+ expect(google_api_client).to receive(:list_cloudsql_databases).and_return(list_databases_empty)
+ expect(google_api_client).to receive(:list_cloudsql_users).and_return(list_users_empty)
end
message = subject[:message]
@@ -102,12 +121,59 @@ RSpec.describe GoogleCloud::SetupCloudsqlInstanceService do
end
end
+ context 'when database and user already exist' do
+ it 'does not try to create a database or user' do
+ allow_next_instance_of(GoogleApi::CloudPlatform::Client) do |google_api_client|
+ expect(google_api_client).to receive(:get_cloudsql_instance).and_return(get_instance_response_runnable)
+ expect(google_api_client).not_to receive(:create_cloudsql_database)
+ expect(google_api_client).not_to receive(:create_cloudsql_user)
+ expect(google_api_client).to receive(:list_cloudsql_databases).and_return(list_databases)
+ expect(google_api_client).to receive(:list_cloudsql_users).and_return(list_users)
+ end
+
+ status = subject[:status]
+ expect(status).to eq(:success)
+ end
+ end
+
+ context 'when database already exists' do
+ it 'does not try to create a database' do
+ allow_next_instance_of(GoogleApi::CloudPlatform::Client) do |google_api_client|
+ expect(google_api_client).to receive(:get_cloudsql_instance).and_return(get_instance_response_runnable)
+ expect(google_api_client).not_to receive(:create_cloudsql_database)
+ expect(google_api_client).to receive(:create_cloudsql_user).and_return(operation_done)
+ expect(google_api_client).to receive(:list_cloudsql_databases).and_return(list_databases)
+ expect(google_api_client).to receive(:list_cloudsql_users).and_return(list_users_empty)
+ end
+
+ status = subject[:status]
+ expect(status).to eq(:success)
+ end
+ end
+
+ context 'when user already exists' do
+ it 'does not try to create a user' do
+ allow_next_instance_of(GoogleApi::CloudPlatform::Client) do |google_api_client|
+ expect(google_api_client).to receive(:get_cloudsql_instance).and_return(get_instance_response_runnable)
+ expect(google_api_client).to receive(:create_cloudsql_database).and_return(operation_done)
+ expect(google_api_client).not_to receive(:create_cloudsql_user)
+ expect(google_api_client).to receive(:list_cloudsql_databases).and_return(list_databases_empty)
+ expect(google_api_client).to receive(:list_cloudsql_users).and_return(list_users)
+ end
+
+ status = subject[:status]
+ expect(status).to eq(:success)
+ end
+ end
+
context 'when database and user creation succeeds' do
it 'stores project CI vars' do
allow_next_instance_of(GoogleApi::CloudPlatform::Client) do |google_api_client|
expect(google_api_client).to receive(:get_cloudsql_instance).and_return(get_instance_response_runnable)
expect(google_api_client).to receive(:create_cloudsql_database).and_return(operation_done)
expect(google_api_client).to receive(:create_cloudsql_user).and_return(operation_done)
+ expect(google_api_client).to receive(:list_cloudsql_databases).and_return(list_databases_empty)
+ expect(google_api_client).to receive(:list_cloudsql_users).and_return(list_users_empty)
end
subject
@@ -143,6 +209,8 @@ RSpec.describe GoogleCloud::SetupCloudsqlInstanceService do
expect(google_api_client).to receive(:get_cloudsql_instance).and_return(get_instance_response_runnable)
expect(google_api_client).to receive(:create_cloudsql_database).and_return(operation_done)
expect(google_api_client).to receive(:create_cloudsql_user).and_return(operation_done)
+ expect(google_api_client).to receive(:list_cloudsql_databases).and_return(list_databases_empty)
+ expect(google_api_client).to receive(:list_cloudsql_users).and_return(list_users_empty)
end
subject
diff --git a/spec/services/groups/create_service_spec.rb b/spec/services/groups/create_service_spec.rb
index 6e074f451c4..0cfde9ef434 100644
--- a/spec/services/groups/create_service_spec.rb
+++ b/spec/services/groups/create_service_spec.rb
@@ -176,6 +176,15 @@ RSpec.describe Groups::CreateService, '#execute' do
end
end
+ describe 'creating a details record' do
+ let(:service) { described_class.new(user, group_params) }
+
+ it 'creates the details record connected to the group' do
+ group = subject
+ expect(group.namespace_details).to be_persisted
+ end
+ end
+
describe 'create service for the group' do
let(:service) { described_class.new(user, group_params) }
let(:created_group) { service.execute }
diff --git a/spec/services/groups/destroy_service_spec.rb b/spec/services/groups/destroy_service_spec.rb
index f43f64fdf89..0d699dd447b 100644
--- a/spec/services/groups/destroy_service_spec.rb
+++ b/spec/services/groups/destroy_service_spec.rb
@@ -7,7 +7,7 @@ RSpec.describe Groups::DestroyService do
let!(:group) { create(:group) }
let!(:nested_group) { create(:group, parent: group) }
let!(:project) { create(:project, :repository, :legacy_storage, namespace: group) }
- let!(:notification_setting) { create(:notification_setting, source: group)}
+ let!(:notification_setting) { create(:notification_setting, source: group) }
let(:gitlab_shell) { Gitlab::Shell.new }
let(:remove_path) { group.path + "+#{group.id}+deleted" }
@@ -74,6 +74,17 @@ RSpec.describe Groups::DestroyService do
end
end
end
+
+ context 'event store', :sidekiq_might_not_need_inline do
+ it 'publishes a GroupDeletedEvent' do
+ expect { destroy_group(group, user, async) }
+ .to publish_event(Groups::GroupDeletedEvent)
+ .with(
+ group_id: group.id,
+ root_namespace_id: group.root_ancestor.id
+ )
+ end
+ end
end
describe 'asynchronous delete' do
@@ -271,7 +282,7 @@ RSpec.describe Groups::DestroyService do
end
context 'the shared_with group is deleted' do
- let!(:group2_subgroup) { create(:group, :private, parent: group2)}
+ let!(:group2_subgroup) { create(:group, :private, parent: group2) }
let!(:group2_subgroup_project) { create(:project, :private, group: group2_subgroup) }
it 'updates project authorizations so users of both groups lose access', :aggregate_failures do
diff --git a/spec/services/groups/merge_requests_count_service_spec.rb b/spec/services/groups/merge_requests_count_service_spec.rb
index 10c7ba5fca4..8bd350d6f0e 100644
--- a/spec/services/groups/merge_requests_count_service_spec.rb
+++ b/spec/services/groups/merge_requests_count_service_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
RSpec.describe Groups::MergeRequestsCountService, :use_clean_rails_memory_store_caching do
let_it_be(:user) { create(:user) }
- let_it_be(:group) { create(:group, :public)}
+ let_it_be(:group) { create(:group, :public) }
let_it_be(:project) { create(:project, :repository, namespace: group) }
let_it_be(:merge_request) { create(:merge_request, source_project: project, target_project: project) }
diff --git a/spec/services/groups/open_issues_count_service_spec.rb b/spec/services/groups/open_issues_count_service_spec.rb
index fca09bfdebe..923caa6c150 100644
--- a/spec/services/groups/open_issues_count_service_spec.rb
+++ b/spec/services/groups/open_issues_count_service_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe Groups::OpenIssuesCountService, :use_clean_rails_memory_store_caching do
- let_it_be(:group) { create(:group, :public)}
+ let_it_be(:group) { create(:group, :public) }
let_it_be(:project) { create(:project, :public, namespace: group) }
let_it_be(:user) { create(:user) }
let_it_be(:issue) { create(:issue, :opened, project: project) }
diff --git a/spec/services/groups/transfer_service_spec.rb b/spec/services/groups/transfer_service_spec.rb
index fbcca215282..b543661e9a0 100644
--- a/spec/services/groups/transfer_service_spec.rb
+++ b/spec/services/groups/transfer_service_spec.rb
@@ -22,6 +22,18 @@ RSpec.describe Groups::TransferService, :sidekiq_inline do
let!(:group_member) { create(:group_member, :owner, group: group, user: user) }
let(:transfer_service) { described_class.new(group, user) }
+ shared_examples 'publishes a GroupTransferedEvent' do
+ it do
+ expect { transfer_service.execute(target) }
+ .to publish_event(Groups::GroupTransferedEvent)
+ .with(
+ group_id: group.id,
+ old_root_namespace_id: group.root_ancestor.id,
+ new_root_namespace_id: target.root_ancestor.id
+ )
+ end
+ end
+
context 'handling packages' do
let_it_be(:group) { create(:group, :public) }
let_it_be(:new_group) { create(:group, :public) }
@@ -895,6 +907,10 @@ RSpec.describe Groups::TransferService, :sidekiq_inline do
expect { transfer_service.execute(root_group) }
.not_to change { CustomerRelations::IssueContact.count }
end
+
+ it_behaves_like 'publishes a GroupTransferedEvent' do
+ let(:target) { root_group }
+ end
end
context 'moving down' do
@@ -904,6 +920,10 @@ RSpec.describe Groups::TransferService, :sidekiq_inline do
expect { transfer_service.execute(another_subgroup) }
.not_to change { CustomerRelations::IssueContact.count }
end
+
+ it_behaves_like 'publishes a GroupTransferedEvent' do
+ let(:target) { another_subgroup }
+ end
end
context 'moving sideways' do
@@ -913,6 +933,10 @@ RSpec.describe Groups::TransferService, :sidekiq_inline do
expect { transfer_service.execute(another_subgroup) }
.not_to change { CustomerRelations::IssueContact.count }
end
+
+ it_behaves_like 'publishes a GroupTransferedEvent' do
+ let(:target) { another_subgroup }
+ end
end
context 'moving to new root group' do
@@ -932,6 +956,10 @@ RSpec.describe Groups::TransferService, :sidekiq_inline do
expect { transfer_service.execute(new_parent_group) }
.not_to change { CustomerRelations::IssueContact.count }
end
+
+ it_behaves_like 'publishes a GroupTransferedEvent' do
+ let(:target) { new_parent_group }
+ end
end
context 'moving to a subgroup within a new root group' do
@@ -953,6 +981,10 @@ RSpec.describe Groups::TransferService, :sidekiq_inline do
expect { transfer_service.execute(subgroup_in_new_parent_group) }
.not_to change { CustomerRelations::IssueContact.count }
end
+
+ it_behaves_like 'publishes a GroupTransferedEvent' do
+ let(:target) { subgroup_in_new_parent_group }
+ end
end
context 'with permission on the subgroup' do
@@ -965,6 +997,11 @@ RSpec.describe Groups::TransferService, :sidekiq_inline do
expect(transfer_service.error).to eq("Transfer failed: Group contains contacts/organizations and you don't have enough permissions to move them to the new root group.")
end
+
+ it 'does not publish a GroupTransferedEvent' do
+ expect { transfer_service.execute(subgroup_in_new_parent_group) }
+ .not_to publish_event(Groups::GroupTransferedEvent)
+ end
end
end
end
diff --git a/spec/services/groups/update_service_spec.rb b/spec/services/groups/update_service_spec.rb
index 856dd4a2567..5c87b9ac8bb 100644
--- a/spec/services/groups/update_service_spec.rb
+++ b/spec/services/groups/update_service_spec.rb
@@ -339,8 +339,44 @@ RSpec.describe Groups::UpdateService do
end
end
+ context 'EventStore' do
+ let(:service) { described_class.new(group, user, **params) }
+ let(:root_group) { create(:group, path: 'root') }
+ let(:group) do
+ create(:group, parent: root_group, path: 'old-path').tap { |g| g.add_owner(user) }
+ end
+
+ context 'when changing a group path' do
+ let(:new_path) { SecureRandom.hex }
+ let(:params) { { path: new_path } }
+
+ it 'publishes a GroupPathChangedEvent' do
+ old_path = group.full_path
+
+ expect { service.execute }
+ .to publish_event(Groups::GroupPathChangedEvent)
+ .with(
+ group_id: group.id,
+ root_namespace_id: group.root_ancestor.id,
+ old_path: old_path,
+ new_path: "root/#{new_path}"
+ )
+ end
+ end
+
+ context 'when not changing a group path' do
+ let(:params) { { name: 'very-new-name' } }
+
+ it 'does not publish a GroupPathChangedEvent' do
+ expect { service.execute }
+ .not_to publish_event(Groups::GroupPathChangedEvent)
+ end
+ end
+ end
+
context 'rename group' do
- let!(:service) { described_class.new(internal_group, user, path: SecureRandom.hex) }
+ let(:new_path) { SecureRandom.hex }
+ let!(:service) { described_class.new(internal_group, user, path: new_path) }
before do
internal_group.add_member(user, Gitlab::Access::MAINTAINER)
@@ -371,7 +407,7 @@ RSpec.describe Groups::UpdateService do
end
it "hasn't changed the path" do
- expect { service.execute}.not_to change { internal_group.reload.path}
+ expect { service.execute }.not_to change { internal_group.reload.path }
end
end
end
diff --git a/spec/services/groups/update_statistics_service_spec.rb b/spec/services/groups/update_statistics_service_spec.rb
index 5bef51c2727..84b18b670a7 100644
--- a/spec/services/groups/update_statistics_service_spec.rb
+++ b/spec/services/groups/update_statistics_service_spec.rb
@@ -7,7 +7,7 @@ RSpec.describe Groups::UpdateStatisticsService do
let(:statistics) { %w(wiki_size) }
- subject(:service) { described_class.new(group, statistics: statistics)}
+ subject(:service) { described_class.new(group, statistics: statistics) }
describe '#execute', :aggregate_failures do
context 'when group is nil' do
diff --git a/spec/services/import/fogbugz_service_spec.rb b/spec/services/import/fogbugz_service_spec.rb
index c9477dba7a5..7b86c5c45b0 100644
--- a/spec/services/import/fogbugz_service_spec.rb
+++ b/spec/services/import/fogbugz_service_spec.rb
@@ -119,7 +119,7 @@ RSpec.describe Import::FogbugzService do
let(:error_messages_array) { instance_double(Array, join: "something went wrong") }
let(:errors_double) { instance_double(ActiveModel::Errors, full_messages: error_messages_array, :[] => nil) }
let(:project_double) { instance_double(Project, persisted?: false, errors: errors_double) }
- let(:project_creator) { instance_double(Gitlab::FogbugzImport::ProjectCreator, execute: project_double )}
+ let(:project_creator) { instance_double(Gitlab::FogbugzImport::ProjectCreator, execute: project_double) }
before do
allow(Gitlab::FogbugzImport::ProjectCreator).to receive(:new).and_return(project_creator)
diff --git a/spec/services/import/prepare_service_spec.rb b/spec/services/import/prepare_service_spec.rb
new file mode 100644
index 00000000000..0097198f7a9
--- /dev/null
+++ b/spec/services/import/prepare_service_spec.rb
@@ -0,0 +1,66 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Import::PrepareService do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:user) { create(:user) }
+
+ let(:file) { double }
+ let(:upload_service) { double }
+ let(:uploader) { double }
+ let(:upload) { double }
+
+ let(:service) { described_class.new(project, user, file: file) }
+
+ subject { service.execute }
+
+ context 'when file is uploaded correctly' do
+ let(:upload_id) { 99 }
+
+ before do
+ mock_upload
+ end
+
+ it 'raises a NotImplementedError for worker' do
+ expect { subject }.to raise_error(NotImplementedError)
+ end
+
+ context 'when a job is enqueued' do
+ before do
+ worker = double
+
+ allow(service).to receive(:worker).and_return(worker)
+ allow(worker).to receive(:perform_async)
+ end
+
+ it 'raises a NotImplementedError for success_message when a job is enqueued' do
+ expect { subject }.to raise_error(NotImplementedError)
+ end
+
+ it 'returns a success response when a success_message is implemented' do
+ message = 'It works!'
+
+ allow(service).to receive(:success_message).and_return(message)
+
+ result = subject
+
+ expect(result).to be_success
+ expect(result.message).to eq(message)
+ end
+ end
+ end
+
+ context 'when file upload fails' do
+ before do
+ mock_upload(false)
+ end
+
+ it 'returns an error message' do
+ result = subject
+
+ expect(result[:status]).to eq(:error)
+ expect(result[:message]).to eq('File upload error.')
+ end
+ end
+end
diff --git a/spec/services/import/validate_remote_git_endpoint_service_spec.rb b/spec/services/import/validate_remote_git_endpoint_service_spec.rb
index 9dc862b6ca3..221ac2cd73a 100644
--- a/spec/services/import/validate_remote_git_endpoint_service_spec.rb
+++ b/spec/services/import/validate_remote_git_endpoint_service_spec.rb
@@ -25,7 +25,7 @@ RSpec.describe Import::ValidateRemoteGitEndpointService do
end
context 'when uri is using git:// protocol' do
- subject { described_class.new(url: 'git://demo.host/repo')}
+ subject { described_class.new(url: 'git://demo.host/repo') }
it 'returns success' do
result = subject.execute
diff --git a/spec/services/incident_management/timeline_events/create_service_spec.rb b/spec/services/incident_management/timeline_events/create_service_spec.rb
index a4e928b98f4..b999403e168 100644
--- a/spec/services/incident_management/timeline_events/create_service_spec.rb
+++ b/spec/services/incident_management/timeline_events/create_service_spec.rb
@@ -244,5 +244,88 @@ RSpec.describe IncidentManagement::TimelineEvents::CreateService do
it_behaves_like 'successfully created timeline event'
end
+
+ describe '.change_labels' do
+ subject(:execute) do
+ described_class.change_labels(incident, current_user, added_labels: added, removed_labels: removed)
+ end
+
+ let_it_be(:labels) { create_list(:label, 4, project: project) }
+
+ let(:expected_action) { 'label' }
+
+ context 'when there are neither added nor removed labels' do
+ let(:added) { [] }
+ let(:removed) { [] }
+
+ it 'responds with error', :aggregate_failures do
+ expect(execute).to be_error
+ expect(execute.message).to eq(_('There are no changed labels'))
+ end
+
+ it 'does not create timeline event' do
+ expect { execute }.not_to change { incident.incident_management_timeline_events.count }
+ end
+ end
+
+ context 'when there are only added labels' do
+ let(:added) { [labels[0], labels[1]] }
+ let(:removed) { [] }
+
+ let(:expected_note) { "@#{current_user.username} added #{added.map(&:to_reference).join(' ')} labels" }
+
+ it_behaves_like 'successfully created timeline event'
+ end
+
+ context 'when there are only removed labels' do
+ let(:added) { [] }
+ let(:removed) { [labels[2], labels[3]] }
+
+ let(:expected_note) { "@#{current_user.username} removed #{removed.map(&:to_reference).join(' ')} labels" }
+
+ it_behaves_like 'successfully created timeline event'
+ end
+
+ context 'when there are both added and removed labels' do
+ let(:added) { [labels[0], labels[1]] }
+ let(:removed) { [labels[2], labels[3]] }
+
+ let(:expected_note) do
+ added_note = "added #{added.map(&:to_reference).join(' ')} labels"
+ removed_note = "removed #{removed.map(&:to_reference).join(' ')} labels"
+
+ "@#{current_user.username} #{added_note} and #{removed_note}"
+ end
+
+ it_behaves_like 'successfully created timeline event'
+ end
+
+ context 'when there is a single added and a single removed label' do
+ let(:added) { [labels[0]] }
+ let(:removed) { [labels[3]] }
+
+ let(:expected_note) do
+ added_note = "added #{added.first.to_reference} label"
+ removed_note = "removed #{removed.first.to_reference} label"
+
+ "@#{current_user.username} #{added_note} and #{removed_note}"
+ end
+
+ it_behaves_like 'successfully created timeline event'
+ end
+
+ context 'when feature flag is disabled' do
+ let(:added) { [labels[0], labels[1]] }
+ let(:removed) { [labels[2], labels[3]] }
+
+ before do
+ stub_feature_flags(incident_timeline_events_from_labels: false)
+ end
+
+ it 'does not create timeline event' do
+ expect { execute }.not_to change { incident.incident_management_timeline_events.count }
+ end
+ end
+ end
end
end
diff --git a/spec/services/incident_management/timeline_events/update_service_spec.rb b/spec/services/incident_management/timeline_events/update_service_spec.rb
index 728f2fa3e9d..f612c72e2a8 100644
--- a/spec/services/incident_management/timeline_events/update_service_spec.rb
+++ b/spec/services/incident_management/timeline_events/update_service_spec.rb
@@ -32,6 +32,10 @@ RSpec.describe IncidentManagement::TimelineEvents::UpdateService do
expect(execute.message).to eq(message)
end
+ it 'does not update the note' do
+ expect { execute }.not_to change { timeline_event.reload.note }
+ end
+
it_behaves_like 'does not track incident management event', :incident_management_timeline_event_edited
end
@@ -94,16 +98,7 @@ RSpec.describe IncidentManagement::TimelineEvents::UpdateService do
context 'when note is blank' do
let(:params) { { note: '', occurred_at: occurred_at } }
- it_behaves_like 'successful response'
- it_behaves_like 'passing the correct was_changed value', :occurred_at
-
- it 'does not update the note' do
- expect { execute }.not_to change { timeline_event.reload.note }
- end
-
- it 'updates occurred_at' do
- expect { execute }.to change { timeline_event.occurred_at }.to(params[:occurred_at])
- end
+ it_behaves_like 'error response', "Note can't be blank"
end
context 'when occurred_at is nil' do
@@ -121,6 +116,12 @@ RSpec.describe IncidentManagement::TimelineEvents::UpdateService do
end
end
+ context 'when occurred_at is blank' do
+ let(:params) { { note: 'Updated note', occurred_at: '' } }
+
+ it_behaves_like 'error response', "Occurred at can't be blank"
+ end
+
context 'when both occurred_at and note is nil' do
let(:params) { {} }
diff --git a/spec/services/issuable/common_system_notes_service_spec.rb b/spec/services/issuable/common_system_notes_service_spec.rb
index 1426ef2a1f6..0d2b8a4ac3c 100644
--- a/spec/services/issuable/common_system_notes_service_spec.rb
+++ b/spec/services/issuable/common_system_notes_service_spec.rb
@@ -8,6 +8,37 @@ RSpec.describe Issuable::CommonSystemNotesService do
let(:issuable) { create(:issue, project: project) }
+ shared_examples 'system note for issuable date changes' do
+ it 'creates a system note for due_date set' do
+ issuable.update!(due_date: Date.today)
+
+ expect { subject }.to change(issuable.notes, :count).from(0).to(1)
+ expect(issuable.notes.last.note).to match('changed due date to')
+ end
+
+ it 'creates a system note for start_date set' do
+ issuable.update!(start_date: Date.today)
+
+ expect { subject }.to change(issuable.notes, :count).from(0).to(1)
+ expect(issuable.notes.last.note).to match('changed start date to')
+ end
+
+ it 'creates a note when both start and due date are changed' do
+ issuable.update!(start_date: Date.today, due_date: 1.week.from_now)
+
+ expect { subject }.to change { issuable.notes.count }.from(0).to(1)
+ expect(issuable.notes.last.note).to match(/changed start date to.+and changed due date to/)
+ end
+
+ it 'does not call SystemNoteService if no dates are changed' do
+ issuable.update!(title: 'new title')
+
+ expect(SystemNoteService).not_to receive(:change_start_date_or_due_date)
+
+ subject
+ end
+ end
+
context 'on issuable update' do
it_behaves_like 'system note creation', { title: 'New title' }, 'changed title'
it_behaves_like 'system note creation', { description: 'New description' }, 'changed the description'
@@ -61,6 +92,12 @@ RSpec.describe Issuable::CommonSystemNotesService do
end
end
end
+
+ context 'when changing dates' do
+ it_behaves_like 'system note for issuable date changes' do
+ subject { described_class.new(project: project, current_user: user).execute(issuable) }
+ end
+ end
end
context 'on issuable create' do
@@ -102,12 +139,8 @@ RSpec.describe Issuable::CommonSystemNotesService do
end
end
- it 'creates a system note for due_date set' do
- issuable.due_date = Date.today
- issuable.save!
-
- expect { subject }.to change { issuable.notes.count }.from(0).to(1)
- expect(issuable.notes.last.note).to match('changed due date')
+ context 'when changing dates' do
+ it_behaves_like 'system note for issuable date changes'
end
end
end
diff --git a/spec/services/issues/clone_service_spec.rb b/spec/services/issues/clone_service_spec.rb
index 858dfc4ab3a..435488b7f66 100644
--- a/spec/services/issues/clone_service_spec.rb
+++ b/spec/services/issues/clone_service_spec.rb
@@ -57,8 +57,20 @@ RSpec.describe Issues::CloneService do
expect(old_issue.notes.last.note).to start_with 'cloned to'
end
- it 'adds system note to new issue at the end' do
- expect(new_issue.notes.last.note).to start_with 'cloned from'
+ it 'adds system note to new issue at the start' do
+ # We set an assignee so an assignee system note will be generated and
+ # we can assert that the "cloned from" note is the first one
+ assignee = create(:user)
+ new_project.add_developer(assignee)
+ old_issue.assignees = [assignee]
+
+ new_issue = clone_service.execute(old_issue, new_project)
+
+ expect(new_issue.notes.size).to eq(2)
+
+ cloned_from_note = new_issue.notes.last
+ expect(cloned_from_note.note).to start_with 'cloned from'
+ expect(new_issue.notes.fresh.first).to eq(cloned_from_note)
end
it 'keeps old issue open' do
@@ -128,11 +140,11 @@ RSpec.describe Issues::CloneService do
context 'issue with award emoji' do
let!(:award_emoji) { create(:award_emoji, awardable: old_issue) }
- it 'copies the award emoji' do
+ it 'does not copy the award emoji' do
old_issue.reload
new_issue = clone_service.execute(old_issue, new_project)
- expect(old_issue.award_emoji.first.name).to eq new_issue.reload.award_emoji.first.name
+ expect(new_issue.reload.award_emoji).to be_empty
end
end
@@ -170,19 +182,21 @@ RSpec.describe Issues::CloneService do
context 'issue with due date' do
let(:date) { Date.parse('2020-01-10') }
+ let(:new_date) { date + 1.week }
let(:old_issue) do
create(:issue, title: title, description: description, project: old_project, author: author, due_date: date)
end
before do
- SystemNoteService.change_due_date(old_issue, old_project, author, old_issue.due_date)
+ old_issue.update!(due_date: new_date)
+ SystemNoteService.change_start_date_or_due_date(old_issue, old_project, author, old_issue.previous_changes.slice('due_date'))
end
it 'keeps the same due date' do
new_issue = clone_service.execute(old_issue, new_project)
- expect(new_issue.due_date).to eq(date)
+ expect(new_issue.due_date).to eq(old_issue.due_date)
end
end
diff --git a/spec/services/issues/create_service_spec.rb b/spec/services/issues/create_service_spec.rb
index 0bc8511e3e3..80c455e72b0 100644
--- a/spec/services/issues/create_service_spec.rb
+++ b/spec/services/issues/create_service_spec.rb
@@ -69,6 +69,12 @@ RSpec.describe Issues::CreateService do
expect(issue.issue_customer_relations_contacts).to be_empty
end
+ it 'calls NewIssueWorker with correct arguments' do
+ expect(NewIssueWorker).to receive(:perform_async).with(Integer, user.id, 'Issue')
+
+ issue
+ end
+
context 'when a build_service is provided' do
let(:issue) { described_class.new(project: project, current_user: user, params: opts, spam_params: spam_params, build_service: build_service).execute }
@@ -143,6 +149,12 @@ RSpec.describe Issues::CreateService do
issue
end
+ it 'calls NewIssueWorker with correct arguments' do
+ expect(NewIssueWorker).to receive(:perform_async).with(Integer, reporter.id, 'Issue')
+
+ issue
+ end
+
context 'when invalid' do
before do
opts.merge!(title: '')
diff --git a/spec/services/issues/move_service_spec.rb b/spec/services/issues/move_service_spec.rb
index 5a1bb2e8b74..863df810d01 100644
--- a/spec/services/issues/move_service_spec.rb
+++ b/spec/services/issues/move_service_spec.rb
@@ -16,7 +16,7 @@ RSpec.describe Issues::MoveService do
let_it_be(:new_project) { create(:project, namespace: sub_group_2) }
let(:old_issue) do
- create(:issue, title: title, description: description, project: old_project, author: author)
+ create(:issue, title: title, description: description, project: old_project, author: author, created_at: 1.day.ago, updated_at: 1.day.ago)
end
subject(:move_service) do
@@ -62,8 +62,11 @@ RSpec.describe Issues::MoveService do
expect(old_issue.notes.last.note).to start_with 'moved to'
end
- it 'adds system note to new issue at the end' do
- expect(new_issue.notes.last.note).to start_with 'moved from'
+ it 'adds system note to new issue at the end', :freeze_time do
+ system_note = new_issue.notes.last
+
+ expect(system_note.note).to start_with 'moved from'
+ expect(system_note.created_at).to be_like_time(Time.current)
end
it 'closes old issue' do
@@ -137,7 +140,8 @@ RSpec.describe Issues::MoveService do
end
before do
- SystemNoteService.change_due_date(old_issue, old_project, author, old_issue.due_date)
+ old_issue.update!(due_date: Date.today)
+ SystemNoteService.change_start_date_or_due_date(old_issue, old_project, author, old_issue.previous_changes.slice('due_date'))
end
it 'does not create extra system notes' do
diff --git a/spec/services/issues/prepare_import_csv_service_spec.rb b/spec/services/issues/prepare_import_csv_service_spec.rb
new file mode 100644
index 00000000000..ded23ee43b9
--- /dev/null
+++ b/spec/services/issues/prepare_import_csv_service_spec.rb
@@ -0,0 +1,51 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Issues::PrepareImportCsvService do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:user) { create(:user) }
+
+ let(:file) { double }
+ let(:upload_service) { double }
+ let(:uploader) { double }
+ let(:upload) { double }
+
+ let(:subject) do
+ described_class.new(project, user, file: file).execute
+ end
+
+ context 'when file is uploaded correctly' do
+ let(:upload_id) { 99 }
+
+ before do
+ mock_upload
+ end
+
+ it 'returns a success message' do
+ result = subject
+
+ expect(result[:status]).to eq(:success)
+ expect(result[:message]).to eq("Your issues are being imported. Once finished, you'll get a confirmation email.")
+ end
+
+ it 'enqueues the ImportIssuesCsvWorker' do
+ expect(ImportIssuesCsvWorker).to receive(:perform_async).with(user.id, project.id, upload_id)
+
+ subject
+ end
+ end
+
+ context 'when file upload fails' do
+ before do
+ mock_upload(false)
+ end
+
+ it 'returns an error message' do
+ result = subject
+
+ expect(result[:status]).to eq(:error)
+ expect(result[:message]).to eq('File upload error.')
+ end
+ end
+end
diff --git a/spec/services/issues/referenced_merge_requests_service_spec.rb b/spec/services/issues/referenced_merge_requests_service_spec.rb
index dc55ba8ebea..16166c1fa33 100644
--- a/spec/services/issues/referenced_merge_requests_service_spec.rb
+++ b/spec/services/issues/referenced_merge_requests_service_spec.rb
@@ -106,7 +106,7 @@ RSpec.describe Issues::ReferencedMergeRequestsService do
end
describe '#closed_by_merge_requests' do
- let(:closed_issue) { build(:issue, :closed, project: project)}
+ let(:closed_issue) { build(:issue, :closed, project: project) }
it 'returns the open merge requests that close this issue' do
create_closing_mr(source_project: project, state: 'closed')
diff --git a/spec/services/issues/update_service_spec.rb b/spec/services/issues/update_service_spec.rb
index e2e8828ae89..aef3608831c 100644
--- a/spec/services/issues/update_service_spec.rb
+++ b/spec/services/issues/update_service_spec.rb
@@ -849,8 +849,8 @@ RSpec.describe Issues::UpdateService, :mailer do
end
it 'creates system note about task status change' do
- note1 = find_note('marked the task **Task 1** as completed')
- note2 = find_note('marked the task **Task 2** as completed')
+ note1 = find_note('marked the checklist item **Task 1** as completed')
+ note2 = find_note('marked the checklist item **Task 2** as completed')
expect(note1).not_to be_nil
expect(note2).not_to be_nil
@@ -867,8 +867,8 @@ RSpec.describe Issues::UpdateService, :mailer do
end
it 'creates system note about task status change' do
- note1 = find_note('marked the task **Task 1** as incomplete')
- note2 = find_note('marked the task **Task 2** as incomplete')
+ note1 = find_note('marked the checklist item **Task 1** as incomplete')
+ note2 = find_note('marked the checklist item **Task 2** as incomplete')
expect(note1).not_to be_nil
expect(note2).not_to be_nil
@@ -885,7 +885,7 @@ RSpec.describe Issues::UpdateService, :mailer do
end
it 'does not create a system note for the task' do
- task_note = find_note('marked the task **Task 2** as incomplete')
+ task_note = find_note('marked the checklist item **Task 2** as incomplete')
description_notes = find_notes('description')
expect(task_note).to be_nil
@@ -900,7 +900,7 @@ RSpec.describe Issues::UpdateService, :mailer do
end
it 'does not create a system note referencing the position of the old item' do
- task_note = find_note('marked the task **Two** as incomplete')
+ task_note = find_note('marked the checklist item **Two** as incomplete')
description_notes = find_notes('description')
expect(task_note).to be_nil
@@ -988,6 +988,52 @@ RSpec.describe Issues::UpdateService, :mailer do
end
end
+ context 'updating dates' do
+ subject(:result) { described_class.new(project: project, current_user: user, params: params).execute(issue) }
+
+ let(:updated_date) { 1.week.from_now.to_date }
+
+ shared_examples 'issue update service that triggers date updates' do
+ it 'triggers graphql date updated subscription' do
+ expect(GraphqlTriggers).to receive(:issuable_dates_updated).with(issue).and_call_original
+
+ result
+ end
+ end
+
+ shared_examples 'issue update service that does not trigger date updates' do
+ it 'does not trigger date updated subscriptions' do
+ expect(GraphqlTriggers).not_to receive(:issuable_dates_updated)
+
+ result
+ end
+ end
+
+ context 'when due_date is updated' do
+ let(:params) { { due_date: updated_date } }
+
+ it_behaves_like 'issue update service that triggers date updates'
+ end
+
+ context 'when start_date is updated' do
+ let(:params) { { start_date: updated_date } }
+
+ it_behaves_like 'issue update service that triggers date updates'
+ end
+
+ context 'when no date is updated' do
+ let(:params) { { title: 'should not trigger date updates' } }
+
+ it_behaves_like 'issue update service that does not trigger date updates'
+ end
+
+ context 'when update is not successful but date is provided' do
+ let(:params) { { title: '', due_date: updated_date } }
+
+ it_behaves_like 'issue update service that does not trigger date updates'
+ end
+ end
+
context 'updating assignee_id' do
it 'does not update assignee when assignee_id is invalid' do
update_issue(assignee_ids: [-1])
diff --git a/spec/services/jira_import/start_import_service_spec.rb b/spec/services/jira_import/start_import_service_spec.rb
index 510f58f0e75..c0db3012a30 100644
--- a/spec/services/jira_import/start_import_service_spec.rb
+++ b/spec/services/jira_import/start_import_service_spec.rb
@@ -136,7 +136,7 @@ RSpec.describe JiraImport::StartImportService do
end
context 'when multiple Jira imports for same Jira project' do
- let!(:jira_imports) { create_list(:jira_import_state, 3, :finished, project: project, jira_project_key: fake_key)}
+ let!(:jira_imports) { create_list(:jira_import_state, 3, :finished, project: project, jira_project_key: fake_key) }
it 'creates Jira label title with correct number' do
jira_import = subject.payload[:import_data]
diff --git a/spec/services/lfs/push_service_spec.rb b/spec/services/lfs/push_service_spec.rb
index e1564ca2359..f52bba94eea 100644
--- a/spec/services/lfs/push_service_spec.rb
+++ b/spec/services/lfs/push_service_spec.rb
@@ -98,7 +98,7 @@ RSpec.describe Lfs::PushService do
end
def batch_spec(*objects, upload: true, verify: false)
- { 'transfer' => 'basic', 'objects' => objects.map {|o| object_spec(o, upload: upload) } }
+ { 'transfer' => 'basic', 'objects' => objects.map { |o| object_spec(o, upload: upload) } }
end
def object_spec(object, upload: true, verify: false)
diff --git a/spec/services/markdown_content_rewriter_service_spec.rb b/spec/services/markdown_content_rewriter_service_spec.rb
index 91a117536ca..d94289856cf 100644
--- a/spec/services/markdown_content_rewriter_service_spec.rb
+++ b/spec/services/markdown_content_rewriter_service_spec.rb
@@ -8,7 +8,7 @@ RSpec.describe MarkdownContentRewriterService do
let_it_be(:target_parent) { create(:project, :public) }
let(:content) { 'My content' }
- let(:issue) { create(:issue, project: source_parent, description: content)}
+ let(:issue) { create(:issue, project: source_parent, description: content) }
describe '#initialize' do
it 'raises an error if source_parent is not a Project' do
diff --git a/spec/services/members/groups/creator_service_spec.rb b/spec/services/members/groups/creator_service_spec.rb
index 4130fbd44fa..fced7195046 100644
--- a/spec/services/members/groups/creator_service_spec.rb
+++ b/spec/services/members/groups/creator_service_spec.rb
@@ -27,7 +27,10 @@ RSpec.describe Members::Groups::CreatorService do
context 'authorized projects update' do
it 'schedules a single project authorization update job when called multiple times' do
- expect(AuthorizedProjectsWorker).to receive(:bulk_perform_and_wait).once
+ # this is in line with the overridden behaviour in stubbed_member.rb
+ worker_instance = AuthorizedProjectsWorker.new
+ expect(AuthorizedProjectsWorker).to receive(:new).once.and_return(worker_instance)
+ expect(worker_instance).to receive(:perform).with(user.id)
1.upto(3) do
described_class.add_member(source, user, :maintainer)
diff --git a/spec/services/members/invite_service_spec.rb b/spec/services/members/invite_service_spec.rb
index d25c8996931..6dbe161ee02 100644
--- a/spec/services/members/invite_service_spec.rb
+++ b/spec/services/members/invite_service_spec.rb
@@ -455,7 +455,7 @@ RSpec.describe Members::InviteService, :aggregate_failures, :clean_gitlab_redis_
end
context 'when access_level is lower than inheriting member' do
- let(:params) { { user_id: group_member.user_id, access_level: ProjectMember::GUEST }}
+ let(:params) { { user_id: group_member.user_id, access_level: ProjectMember::GUEST } }
it 'does not add the member and returns an error' do
msg = "Access level should be greater than or equal " \
@@ -467,7 +467,7 @@ RSpec.describe Members::InviteService, :aggregate_failures, :clean_gitlab_redis_
end
context 'when access_level is the same as the inheriting member' do
- let(:params) { { user_id: group_member.user_id, access_level: ProjectMember::DEVELOPER }}
+ let(:params) { { user_id: group_member.user_id, access_level: ProjectMember::DEVELOPER } }
it 'adds the member with correct access_level' do
expect_to_create_members(count: 1)
@@ -477,7 +477,7 @@ RSpec.describe Members::InviteService, :aggregate_failures, :clean_gitlab_redis_
end
context 'when access_level is greater than the inheriting member' do
- let(:params) { { user_id: group_member.user_id, access_level: ProjectMember::MAINTAINER }}
+ let(:params) { { user_id: group_member.user_id, access_level: ProjectMember::MAINTAINER } }
it 'adds the member with correct access_level' do
expect_to_create_members(count: 1)
diff --git a/spec/services/merge_requests/approval_service_spec.rb b/spec/services/merge_requests/approval_service_spec.rb
index e1fbb945ee3..ab98fad5d73 100644
--- a/spec/services/merge_requests/approval_service_spec.rb
+++ b/spec/services/merge_requests/approval_service_spec.rb
@@ -20,79 +20,111 @@ RSpec.describe MergeRequests::ApprovalService do
allow(merge_request.approvals).to receive(:new).and_return(double(save: false))
end
- it 'does not create an approval note' do
- expect(SystemNoteService).not_to receive(:approve_mr)
+ it 'does not reset approvals' do
+ expect(merge_request.approvals).not_to receive(:reset)
service.execute(merge_request)
end
- it 'does not mark pending todos as done' do
- service.execute(merge_request)
-
- expect(todo.reload).to be_pending
- end
-
it 'does not track merge request approve action' do
expect(Gitlab::UsageDataCounters::MergeRequestActivityUniqueCounter)
.not_to receive(:track_approve_mr_action).with(user: user)
service.execute(merge_request)
end
- end
-
- context 'with valid approval' do
- let(:notification_service) { NotificationService.new }
- before do
- allow(service).to receive(:notification_service).and_return(notification_service)
+ it 'does not publish MergeRequests::ApprovedEvent' do
+ expect { service.execute(merge_request) }.not_to publish_event(MergeRequests::ApprovedEvent)
end
- it 'creates an approval note and marks pending todos as done' do
- expect(SystemNoteService).to receive(:approve_mr).with(merge_request, user)
- expect(merge_request.approvals).to receive(:reset)
+ context 'async_after_approval feature flag is disabled' do
+ before do
+ stub_feature_flags(async_after_approval: false)
+ end
- service.execute(merge_request)
+ it 'does not create approve MR event' do
+ expect(EventCreateService).not_to receive(:new)
- expect(todo.reload).to be_done
- end
+ service.execute(merge_request)
+ end
- it 'creates approve MR event' do
- expect_next_instance_of(EventCreateService) do |instance|
- expect(instance).to receive(:approve_mr)
- .with(merge_request, user)
+ it 'does not create an approval note' do
+ expect(SystemNoteService).not_to receive(:approve_mr)
+
+ service.execute(merge_request)
end
- service.execute(merge_request)
+ it 'does not mark pending todos as done' do
+ service.execute(merge_request)
+
+ expect(todo.reload).to be_pending
+ end
end
+ end
- it 'sends a notification when approving' do
- expect(notification_service).to receive_message_chain(:async, :approve_mr)
- .with(merge_request, user)
+ context 'with valid approval' do
+ it 'resets approvals' do
+ expect(merge_request.approvals).to receive(:reset)
service.execute(merge_request)
end
- it 'removes attention requested state' do
- expect(MergeRequests::RemoveAttentionRequestedService).to receive(:new)
- .with(project: project, current_user: user, merge_request: merge_request, user: user)
- .and_call_original
+ it 'tracks merge request approve action' do
+ expect(Gitlab::UsageDataCounters::MergeRequestActivityUniqueCounter)
+ .to receive(:track_approve_mr_action).with(user: user, merge_request: merge_request)
service.execute(merge_request)
end
- context 'with remaining approvals' do
- it 'fires an approval webhook' do
- expect(service).to receive(:execute_hooks).with(merge_request, 'approved')
+ it 'publishes MergeRequests::ApprovedEvent' do
+ expect { service.execute(merge_request) }
+ .to publish_event(MergeRequests::ApprovedEvent)
+ .with(current_user_id: user.id, merge_request_id: merge_request.id)
+ end
+
+ context 'async_after_approval feature flag is disabled' do
+ let(:notification_service) { NotificationService.new }
+
+ before do
+ stub_feature_flags(async_after_approval: false)
+ allow(service).to receive(:notification_service).and_return(notification_service)
+ end
+
+ it 'creates approve MR event' do
+ expect_next_instance_of(EventCreateService) do |instance|
+ expect(instance).to receive(:approve_mr)
+ .with(merge_request, user)
+ end
service.execute(merge_request)
end
- end
- it 'tracks merge request approve action' do
- expect(Gitlab::UsageDataCounters::MergeRequestActivityUniqueCounter)
- .to receive(:track_approve_mr_action).with(user: user, merge_request: merge_request)
+ it 'creates an approval note' do
+ expect(SystemNoteService).to receive(:approve_mr).with(merge_request, user)
- service.execute(merge_request)
+ service.execute(merge_request)
+ end
+
+ it 'marks pending todos as done' do
+ service.execute(merge_request)
+
+ expect(todo.reload).to be_done
+ end
+
+ it 'sends a notification when approving' do
+ expect(notification_service).to receive_message_chain(:async, :approve_mr)
+ .with(merge_request, user)
+
+ service.execute(merge_request)
+ end
+
+ context 'with remaining approvals' do
+ it 'fires an approval webhook' do
+ expect(service).to receive(:execute_hooks).with(merge_request, 'approved')
+
+ service.execute(merge_request)
+ end
+ end
end
end
diff --git a/spec/services/merge_requests/bulk_remove_attention_requested_service_spec.rb b/spec/services/merge_requests/bulk_remove_attention_requested_service_spec.rb
deleted file mode 100644
index b2326a28e63..00000000000
--- a/spec/services/merge_requests/bulk_remove_attention_requested_service_spec.rb
+++ /dev/null
@@ -1,49 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe MergeRequests::BulkRemoveAttentionRequestedService do
- let(:current_user) { create(:user) }
- let(:user) { create(:user) }
- let(:assignee_user) { create(:user) }
- let(:merge_request) { create(:merge_request, reviewers: [user], assignees: [assignee_user]) }
- let(:reviewer) { merge_request.find_reviewer(user) }
- let(:assignee) { merge_request.find_assignee(assignee_user) }
- let(:project) { merge_request.project }
- let(:service) { described_class.new(project: project, current_user: current_user, merge_request: merge_request, users: [user, assignee_user]) }
- let(:result) { service.execute }
-
- before do
- project.add_developer(current_user)
- project.add_developer(user)
- end
-
- describe '#execute' do
- context 'invalid permissions' do
- let(:service) { described_class.new(project: project, current_user: create(:user), merge_request: merge_request, users: [user]) }
-
- it 'returns an error' do
- expect(result[:status]).to eq :error
- end
- end
-
- context 'updates reviewers and assignees' do
- it 'returns success' do
- expect(result[:status]).to eq :success
- end
-
- it 'updates reviewers state' do
- service.execute
- reviewer.reload
- assignee.reload
-
- expect(reviewer.state).to eq 'reviewed'
- expect(assignee.state).to eq 'reviewed'
- end
-
- it_behaves_like 'invalidates attention request cache' do
- let(:users) { [assignee_user, user] }
- end
- end
- end
-end
diff --git a/spec/services/merge_requests/close_service_spec.rb b/spec/services/merge_requests/close_service_spec.rb
index cd1c362a19f..8f448184b45 100644
--- a/spec/services/merge_requests/close_service_spec.rb
+++ b/spec/services/merge_requests/close_service_spec.rb
@@ -54,10 +54,6 @@ RSpec.describe MergeRequests::CloseService do
expect(todo.reload).to be_done
end
- it 'removes attention requested state' do
- expect(merge_request.find_assignee(user2).attention_requested?).to eq(false)
- end
-
context 'when auto merge is enabled' do
let(:merge_request) { create(:merge_request, :merge_when_pipeline_succeeds) }
diff --git a/spec/services/merge_requests/create_approval_event_service_spec.rb b/spec/services/merge_requests/create_approval_event_service_spec.rb
new file mode 100644
index 00000000000..3d41ace11a7
--- /dev/null
+++ b/spec/services/merge_requests/create_approval_event_service_spec.rb
@@ -0,0 +1,22 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe MergeRequests::CreateApprovalEventService do
+ let(:user) { create(:user) }
+ let(:merge_request) { create(:merge_request) }
+ let(:project) { merge_request.project }
+
+ subject(:service) { described_class.new(project: project, current_user: user) }
+
+ describe '#execute' do
+ it 'creates approve MR event' do
+ expect_next_instance_of(EventCreateService) do |instance|
+ expect(instance).to receive(:approve_mr)
+ .with(merge_request, user)
+ end
+
+ service.execute(merge_request)
+ end
+ end
+end
diff --git a/spec/services/merge_requests/create_pipeline_service_spec.rb b/spec/services/merge_requests/create_pipeline_service_spec.rb
index 03a37ea59a3..c443d758a77 100644
--- a/spec/services/merge_requests/create_pipeline_service_spec.rb
+++ b/spec/services/merge_requests/create_pipeline_service_spec.rb
@@ -74,6 +74,16 @@ RSpec.describe MergeRequests::CreatePipelineService do
expect(response.payload.project).to eq(project)
end
+ context 'when the feature is disabled in CI/CD settings' do
+ before do
+ project.update!(ci_allow_fork_pipelines_to_run_in_parent_project: false)
+ end
+
+ it 'creates a pipeline in the source project' do
+ expect(response.payload.project).to eq(source_project)
+ end
+ end
+
context 'when source branch is protected' do
context 'when actor does not have permission to update the protected branch in target project' do
let!(:protected_branch) { create(:protected_branch, name: '*', project: project) }
diff --git a/spec/services/merge_requests/execute_approval_hooks_service_spec.rb b/spec/services/merge_requests/execute_approval_hooks_service_spec.rb
new file mode 100644
index 00000000000..863c47e8191
--- /dev/null
+++ b/spec/services/merge_requests/execute_approval_hooks_service_spec.rb
@@ -0,0 +1,33 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe MergeRequests::ExecuteApprovalHooksService do
+ let(:user) { create(:user) }
+ let(:merge_request) { create(:merge_request) }
+ let(:project) { merge_request.project }
+
+ subject(:service) { described_class.new(project: project, current_user: user) }
+
+ describe '#execute' do
+ let(:notification_service) { NotificationService.new }
+
+ before do
+ allow(service).to receive(:notification_service).and_return(notification_service)
+ end
+ it 'sends a notification when approving' do
+ expect(notification_service).to receive_message_chain(:async, :approve_mr)
+ .with(merge_request, user)
+
+ service.execute(merge_request)
+ end
+
+ context 'with remaining approvals' do
+ it 'fires an approval webhook' do
+ expect(service).to receive(:execute_hooks).with(merge_request, 'approved')
+
+ service.execute(merge_request)
+ end
+ end
+ end
+end
diff --git a/spec/services/merge_requests/handle_assignees_change_service_spec.rb b/spec/services/merge_requests/handle_assignees_change_service_spec.rb
index fa3b1614e21..c43f5db6059 100644
--- a/spec/services/merge_requests/handle_assignees_change_service_spec.rb
+++ b/spec/services/merge_requests/handle_assignees_change_service_spec.rb
@@ -87,14 +87,6 @@ RSpec.describe MergeRequests::HandleAssigneesChangeService do
expect(todo).to be_pending
end
- it 'removes attention requested state' do
- expect(MergeRequests::RemoveAttentionRequestedService).to receive(:new)
- .with(project: project, current_user: user, merge_request: merge_request, user: user)
- .and_call_original
-
- execute
- end
-
it 'tracks users assigned event' do
expect(Gitlab::UsageDataCounters::MergeRequestActivityUniqueCounter)
.to receive(:track_users_assigned_to_mr).once.with(users: [assignee])
diff --git a/spec/services/merge_requests/mergeability/check_broken_status_service_spec.rb b/spec/services/merge_requests/mergeability/check_broken_status_service_spec.rb
index 9e178c121ef..6cc1079c94a 100644
--- a/spec/services/merge_requests/mergeability/check_broken_status_service_spec.rb
+++ b/spec/services/merge_requests/mergeability/check_broken_status_service_spec.rb
@@ -8,6 +8,8 @@ RSpec.describe MergeRequests::Mergeability::CheckBrokenStatusService do
let(:merge_request) { build(:merge_request) }
describe '#execute' do
+ let(:result) { check_broken_status.execute }
+
before do
expect(merge_request).to receive(:broken?).and_return(broken)
end
@@ -16,7 +18,8 @@ RSpec.describe MergeRequests::Mergeability::CheckBrokenStatusService do
let(:broken) { true }
it 'returns a check result with status failed' do
- expect(check_broken_status.execute.status).to eq Gitlab::MergeRequests::Mergeability::CheckResult::FAILED_STATUS
+ expect(result.status).to eq Gitlab::MergeRequests::Mergeability::CheckResult::FAILED_STATUS
+ expect(result.payload[:reason]).to eq(:broken_status)
end
end
@@ -24,7 +27,7 @@ RSpec.describe MergeRequests::Mergeability::CheckBrokenStatusService do
let(:broken) { false }
it 'returns a check result with status success' do
- expect(check_broken_status.execute.status).to eq Gitlab::MergeRequests::Mergeability::CheckResult::SUCCESS_STATUS
+ expect(result.status).to eq Gitlab::MergeRequests::Mergeability::CheckResult::SUCCESS_STATUS
end
end
end
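
Illustration, not part of the diff: the consumer-side pattern these check specs now assert is a status plus a machine-readable :reason in the payload. Only .status, .payload[:reason] and the status constants appear in the specs; how the check service instance is built is elided here.

result = check_broken_status.execute

if result.status == Gitlab::MergeRequests::Mergeability::CheckResult::FAILED_STATUS
  result.payload[:reason] # => :broken_status (or :ci_must_pass, :draft_status, :not_open in the sibling specs)
end
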
diff --git a/spec/services/merge_requests/mergeability/check_ci_status_service_spec.rb b/spec/services/merge_requests/mergeability/check_ci_status_service_spec.rb
index 6fbbecd7c0e..def3cb0ca28 100644
--- a/spec/services/merge_requests/mergeability/check_ci_status_service_spec.rb
+++ b/spec/services/merge_requests/mergeability/check_ci_status_service_spec.rb
@@ -10,6 +10,8 @@ RSpec.describe MergeRequests::Mergeability::CheckCiStatusService do
let(:skip_check) { false }
describe '#execute' do
+ let(:result) { check_ci_status.execute }
+
before do
expect(merge_request).to receive(:mergeable_ci_state?).and_return(mergeable)
end
@@ -18,7 +20,7 @@ RSpec.describe MergeRequests::Mergeability::CheckCiStatusService do
let(:mergeable) { true }
it 'returns a check result with status success' do
- expect(check_ci_status.execute.status).to eq Gitlab::MergeRequests::Mergeability::CheckResult::SUCCESS_STATUS
+ expect(result.status).to eq Gitlab::MergeRequests::Mergeability::CheckResult::SUCCESS_STATUS
end
end
@@ -26,7 +28,8 @@ RSpec.describe MergeRequests::Mergeability::CheckCiStatusService do
let(:mergeable) { false }
it 'returns a check result with status failed' do
- expect(check_ci_status.execute.status).to eq Gitlab::MergeRequests::Mergeability::CheckResult::FAILED_STATUS
+ expect(result.status).to eq Gitlab::MergeRequests::Mergeability::CheckResult::FAILED_STATUS
+ expect(result.payload[:reason]).to eq :ci_must_pass
end
end
end
diff --git a/spec/services/merge_requests/mergeability/check_discussions_status_service_spec.rb b/spec/services/merge_requests/mergeability/check_discussions_status_service_spec.rb
index c24d40967c4..9f107ce046a 100644
--- a/spec/services/merge_requests/mergeability/check_discussions_status_service_spec.rb
+++ b/spec/services/merge_requests/mergeability/check_discussions_status_service_spec.rb
@@ -10,6 +10,8 @@ RSpec.describe MergeRequests::Mergeability::CheckDiscussionsStatusService do
let(:skip_check) { false }
describe '#execute' do
+ let(:result) { check_discussions_status.execute }
+
before do
expect(merge_request).to receive(:mergeable_discussions_state?).and_return(mergeable)
end
@@ -18,7 +20,7 @@ RSpec.describe MergeRequests::Mergeability::CheckDiscussionsStatusService do
let(:mergeable) { true }
it 'returns a check result with status success' do
- expect(check_discussions_status.execute.status).to eq Gitlab::MergeRequests::Mergeability::CheckResult::SUCCESS_STATUS
+ expect(result.status).to eq Gitlab::MergeRequests::Mergeability::CheckResult::SUCCESS_STATUS
end
end
@@ -26,7 +28,8 @@ RSpec.describe MergeRequests::Mergeability::CheckDiscussionsStatusService do
let(:mergeable) { false }
it 'returns a check result with status failed' do
- expect(check_discussions_status.execute.status).to eq Gitlab::MergeRequests::Mergeability::CheckResult::FAILED_STATUS
+ expect(result.status).to eq Gitlab::MergeRequests::Mergeability::CheckResult::FAILED_STATUS
+ expect(result.payload[:reason]).to eq(:discussions_not_resolved)
end
end
end
diff --git a/spec/services/merge_requests/mergeability/check_draft_status_service_spec.rb b/spec/services/merge_requests/mergeability/check_draft_status_service_spec.rb
index 923cff220ef..e9363e5d676 100644
--- a/spec/services/merge_requests/mergeability/check_draft_status_service_spec.rb
+++ b/spec/services/merge_requests/mergeability/check_draft_status_service_spec.rb
@@ -8,6 +8,8 @@ RSpec.describe MergeRequests::Mergeability::CheckDraftStatusService do
let(:merge_request) { build(:merge_request) }
describe '#execute' do
+ let(:result) { check_draft_status.execute }
+
before do
expect(merge_request).to receive(:draft?).and_return(draft)
end
@@ -16,7 +18,8 @@ RSpec.describe MergeRequests::Mergeability::CheckDraftStatusService do
let(:draft) { true }
it 'returns a check result with status failed' do
- expect(check_draft_status.execute.status).to eq Gitlab::MergeRequests::Mergeability::CheckResult::FAILED_STATUS
+ expect(result.status).to eq Gitlab::MergeRequests::Mergeability::CheckResult::FAILED_STATUS
+ expect(result.payload[:reason]).to eq(:draft_status)
end
end
@@ -24,7 +27,7 @@ RSpec.describe MergeRequests::Mergeability::CheckDraftStatusService do
let(:draft) { false }
it 'returns a check result with status success' do
- expect(check_draft_status.execute.status).to eq Gitlab::MergeRequests::Mergeability::CheckResult::SUCCESS_STATUS
+ expect(result.status).to eq Gitlab::MergeRequests::Mergeability::CheckResult::SUCCESS_STATUS
end
end
end
diff --git a/spec/services/merge_requests/mergeability/check_open_status_service_spec.rb b/spec/services/merge_requests/mergeability/check_open_status_service_spec.rb
index b1c9a930317..936524b020a 100644
--- a/spec/services/merge_requests/mergeability/check_open_status_service_spec.rb
+++ b/spec/services/merge_requests/mergeability/check_open_status_service_spec.rb
@@ -8,6 +8,8 @@ RSpec.describe MergeRequests::Mergeability::CheckOpenStatusService do
let(:merge_request) { build(:merge_request) }
describe '#execute' do
+ let(:result) { check_open_status.execute }
+
before do
expect(merge_request).to receive(:open?).and_return(open)
end
@@ -16,7 +18,7 @@ RSpec.describe MergeRequests::Mergeability::CheckOpenStatusService do
let(:open) { true }
it 'returns a check result with status success' do
- expect(check_open_status.execute.status).to eq Gitlab::MergeRequests::Mergeability::CheckResult::SUCCESS_STATUS
+ expect(result.status).to eq Gitlab::MergeRequests::Mergeability::CheckResult::SUCCESS_STATUS
end
end
@@ -24,7 +26,8 @@ RSpec.describe MergeRequests::Mergeability::CheckOpenStatusService do
let(:open) { false }
it 'returns a check result with status failed' do
- expect(check_open_status.execute.status).to eq Gitlab::MergeRequests::Mergeability::CheckResult::FAILED_STATUS
+ expect(result.status).to eq Gitlab::MergeRequests::Mergeability::CheckResult::FAILED_STATUS
+ expect(result.payload[:reason]).to eq(:not_open)
end
end
end
diff --git a/spec/services/merge_requests/mergeability/run_checks_service_spec.rb b/spec/services/merge_requests/mergeability/run_checks_service_spec.rb
index 2bb7dc3eef7..afea3e952a1 100644
--- a/spec/services/merge_requests/mergeability/run_checks_service_spec.rb
+++ b/spec/services/merge_requests/mergeability/run_checks_service_spec.rb
@@ -5,11 +5,11 @@ require 'spec_helper'
RSpec.describe MergeRequests::Mergeability::RunChecksService do
subject(:run_checks) { described_class.new(merge_request: merge_request, params: {}) }
- let_it_be(:merge_request) { create(:merge_request) }
-
describe '#execute' do
subject(:execute) { run_checks.execute }
+ let_it_be(:merge_request) { create(:merge_request) }
+
let(:params) { {} }
let(:success_result) { Gitlab::MergeRequests::Mergeability::CheckResult.success }
@@ -23,7 +23,7 @@ RSpec.describe MergeRequests::Mergeability::RunChecksService do
end
it 'is still a success' do
- expect(execute.all?(&:success?)).to eq(true)
+ expect(execute.success?).to eq(true)
end
end
@@ -41,13 +41,7 @@ RSpec.describe MergeRequests::Mergeability::RunChecksService do
expect(service).not_to receive(:execute)
end
- # Since we're only marking one check to be skipped, we expect to receive
- # `# of checks - 1` success result objects in return
- #
- check_count = merge_request.mergeability_checks.count - 1
- success_array = (1..check_count).each_with_object([]) { |_, array| array << success_result }
-
- expect(execute).to match_array(success_array)
+ expect(execute.success?).to eq(true)
end
end
@@ -75,7 +69,7 @@ RSpec.describe MergeRequests::Mergeability::RunChecksService do
expect(service).to receive(:read).with(merge_check: merge_check).and_return(success_result)
end
- expect(execute).to match_array([success_result])
+ expect(execute.success?).to eq(true)
end
end
@@ -86,7 +80,7 @@ RSpec.describe MergeRequests::Mergeability::RunChecksService do
expect(service).to receive(:write).with(merge_check: merge_check, result_hash: success_result.to_hash).and_return(true)
end
- expect(execute).to match_array([success_result])
+ expect(execute.success?).to eq(true)
end
end
end
@@ -97,7 +91,7 @@ RSpec.describe MergeRequests::Mergeability::RunChecksService do
it 'does not call the results store' do
expect(Gitlab::MergeRequests::Mergeability::ResultsStore).not_to receive(:new)
- expect(execute).to match_array([success_result])
+ expect(execute.success?).to eq(true)
end
end
@@ -109,9 +103,81 @@ RSpec.describe MergeRequests::Mergeability::RunChecksService do
it 'does not call the results store' do
expect(Gitlab::MergeRequests::Mergeability::ResultsStore).not_to receive(:new)
- expect(execute).to match_array([success_result])
+ expect(execute.success?).to eq(true)
end
end
end
end
+
+ describe '#success?' do
+ subject(:success) { run_checks.success? }
+
+ let_it_be(:merge_request) { create(:merge_request) }
+
+ context 'when the execute method has been executed' do
+ before do
+ run_checks.execute
+ end
+
+ context 'when all the checks succeed' do
+ it 'returns true' do
+ expect(success).to eq(true)
+ end
+ end
+
+ context 'when one check fails' do
+ before do
+ allow(merge_request).to receive(:open?).and_return(false)
+ run_checks.execute
+ end
+
+ it 'returns false' do
+ expect(success).to eq(false)
+ end
+ end
+ end
+
+ context 'when execute has not been executed' do
+ it 'raises an error' do
+ expect { subject }
+ .to raise_error(/Execute needs to be called before/)
+ end
+ end
+ end
+
+ describe '#failure_reason' do
+ subject(:failure_reason) { run_checks.failure_reason }
+
+ let_it_be(:merge_request) { create(:merge_request) }
+
+ context 'when the execute method has been executed' do
+ before do
+ run_checks.execute
+ end
+
+ context 'when all the checks succeed' do
+ it 'returns nil' do
+ expect(failure_reason).to eq(nil)
+ end
+ end
+
+ context 'when one check fails' do
+ before do
+ allow(merge_request).to receive(:open?).and_return(false)
+ run_checks.execute
+ end
+
+ it 'returns the open reason' do
+ expect(failure_reason).to eq(:not_open)
+ end
+ end
+ end
+
+ context 'when execute has not been executed' do
+ it 'raises an error' do
+ expect { subject }
+ .to raise_error(/Execute needs to be called before/)
+ end
+ end
+ end
end
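
Taken together, the RunChecksService changes move callers from inspecting an array of per-check results to a single aggregate object. A short usage sketch of the API exercised above, with names taken from the spec:

run_checks = MergeRequests::Mergeability::RunChecksService.new(merge_request: merge_request, params: {})

run_checks.execute          # must run first; success?/failure_reason raise otherwise
run_checks.success?         # => true only when every check passed
run_checks.failure_reason   # => nil on success, or e.g. :not_open when the open check failed
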
diff --git a/spec/services/merge_requests/push_options_handler_service_spec.rb b/spec/services/merge_requests/push_options_handler_service_spec.rb
index 338057f23d5..391377ad801 100644
--- a/spec/services/merge_requests/push_options_handler_service_spec.rb
+++ b/spec/services/merge_requests/push_options_handler_service_spec.rb
@@ -179,7 +179,7 @@ RSpec.describe MergeRequests::PushOptionsHandlerService do
context 'with an existing branch that has a merge request open' do
let(:changes) { existing_branch_changes }
- let!(:merge_request) { create(:merge_request, source_project: project, source_branch: source_branch)}
+ let!(:merge_request) { create(:merge_request, source_project: project, source_branch: source_branch) }
it_behaves_like 'a service that does not create a merge request'
end
@@ -231,7 +231,7 @@ RSpec.describe MergeRequests::PushOptionsHandlerService do
context 'with an existing branch that has a merge request open' do
let(:changes) { existing_branch_changes }
- let!(:merge_request) { create(:merge_request, source_project: project, source_branch: source_branch)}
+ let!(:merge_request) { create(:merge_request, source_project: project, source_branch: source_branch) }
it_behaves_like 'a service that does not create a merge request'
it_behaves_like 'a service that can set the merge request to merge when pipeline succeeds'
@@ -284,7 +284,7 @@ RSpec.describe MergeRequests::PushOptionsHandlerService do
context 'with an existing branch that has a merge request open' do
let(:changes) { existing_branch_changes }
- let!(:merge_request) { create(:merge_request, source_project: project, source_branch: source_branch)}
+ let!(:merge_request) { create(:merge_request, source_project: project, source_branch: source_branch) }
it_behaves_like 'a service that does not create a merge request'
it_behaves_like 'a service that can remove the source branch when it is merged'
@@ -337,7 +337,7 @@ RSpec.describe MergeRequests::PushOptionsHandlerService do
context 'with an existing branch that has a merge request open' do
let(:changes) { existing_branch_changes }
- let!(:merge_request) { create(:merge_request, source_project: project, source_branch: source_branch)}
+ let!(:merge_request) { create(:merge_request, source_project: project, source_branch: source_branch) }
it_behaves_like 'a service that does not create a merge request'
it_behaves_like 'a service that can set the target of a merge request'
@@ -390,7 +390,7 @@ RSpec.describe MergeRequests::PushOptionsHandlerService do
context 'with an existing branch that has a merge request open' do
let(:changes) { existing_branch_changes }
- let!(:merge_request) { create(:merge_request, source_project: project, source_branch: source_branch)}
+ let!(:merge_request) { create(:merge_request, source_project: project, source_branch: source_branch) }
it_behaves_like 'a service that does not create a merge request'
it_behaves_like 'a service that can set the title of a merge request'
@@ -443,7 +443,7 @@ RSpec.describe MergeRequests::PushOptionsHandlerService do
context 'with an existing branch that has a merge request open' do
let(:changes) { existing_branch_changes }
- let!(:merge_request) { create(:merge_request, source_project: project, source_branch: source_branch)}
+ let!(:merge_request) { create(:merge_request, source_project: project, source_branch: source_branch) }
it_behaves_like 'a service that does not create a merge request'
it_behaves_like 'a service that can set the description of a merge request'
@@ -503,7 +503,7 @@ RSpec.describe MergeRequests::PushOptionsHandlerService do
context 'with an existing branch that has a merge request open' do
let(:changes) { existing_branch_changes }
- let!(:merge_request) { create(:merge_request, source_project: project, source_branch: source_branch)}
+ let!(:merge_request) { create(:merge_request, source_project: project, source_branch: source_branch) }
it_behaves_like 'a service that does not create a merge request'
it_behaves_like 'a service that can set the draft of a merge request'
@@ -564,7 +564,7 @@ RSpec.describe MergeRequests::PushOptionsHandlerService do
context 'with an existing branch that has a merge request open' do
let(:changes) { existing_branch_changes }
- let!(:merge_request) { create(:merge_request, source_project: project, source_branch: source_branch)}
+ let!(:merge_request) { create(:merge_request, source_project: project, source_branch: source_branch) }
it_behaves_like 'a service that does not create a merge request'
it_behaves_like 'a service that can change labels of a merge request', 2
@@ -617,7 +617,7 @@ RSpec.describe MergeRequests::PushOptionsHandlerService do
context 'with an existing branch that has a merge request open' do
let(:changes) { existing_branch_changes }
- let!(:merge_request) { create(:merge_request, source_project: project, source_branch: source_branch)}
+ let!(:merge_request) { create(:merge_request, source_project: project, source_branch: source_branch) }
it_behaves_like 'a service that does not create a merge request'
it_behaves_like 'a service that can change labels of a merge request', 1
@@ -672,7 +672,7 @@ RSpec.describe MergeRequests::PushOptionsHandlerService do
context 'with an existing branch that has a merge request open' do
let(:changes) { existing_branch_changes }
- let!(:merge_request) { create(:merge_request, source_project: project, source_branch: source_branch)}
+ let!(:merge_request) { create(:merge_request, source_project: project, source_branch: source_branch) }
it_behaves_like 'a service that does not create a merge request'
it_behaves_like 'a service that can set the milestone of a merge request'
@@ -713,7 +713,7 @@ RSpec.describe MergeRequests::PushOptionsHandlerService do
shared_examples 'with an existing branch that has a merge request open in foss' do
let(:changes) { existing_branch_changes }
- let!(:merge_request) { create(:merge_request, source_project: project, source_branch: source_branch)}
+ let!(:merge_request) { create(:merge_request, source_project: project, source_branch: source_branch) }
it_behaves_like 'a service that does not create a merge request'
it_behaves_like 'a service that can change assignees of a merge request', 1
diff --git a/spec/services/merge_requests/refresh_service_spec.rb b/spec/services/merge_requests/refresh_service_spec.rb
index 4b7dd84474a..09d06b8b2ab 100644
--- a/spec/services/merge_requests/refresh_service_spec.rb
+++ b/spec/services/merge_requests/refresh_service_spec.rb
@@ -185,7 +185,7 @@ RSpec.describe MergeRequests::RefreshService do
end
context 'when pipeline exists for the source branch' do
- let!(:pipeline) { create(:ci_empty_pipeline, ref: @merge_request.source_branch, project: @project, sha: @commits.first.sha)}
+ let!(:pipeline) { create(:ci_empty_pipeline, ref: @merge_request.source_branch, project: @project, sha: @commits.first.sha) }
subject { service.new(project: @project, current_user: @user).execute(@oldrev, @newrev, 'refs/heads/master') }
diff --git a/spec/services/merge_requests/remove_attention_requested_service_spec.rb b/spec/services/merge_requests/remove_attention_requested_service_spec.rb
deleted file mode 100644
index 576049b9f1b..00000000000
--- a/spec/services/merge_requests/remove_attention_requested_service_spec.rb
+++ /dev/null
@@ -1,183 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe MergeRequests::RemoveAttentionRequestedService do
- let_it_be(:current_user) { create(:user) }
- let_it_be(:user) { create(:user) }
- let_it_be(:assignee_user) { create(:user) }
- let_it_be(:merge_request) { create(:merge_request, reviewers: [user], assignees: [assignee_user]) }
-
- let(:reviewer) { merge_request.find_reviewer(user) }
- let(:assignee) { merge_request.find_assignee(assignee_user) }
- let(:project) { merge_request.project }
-
- let(:service) do
- described_class.new(
- project: project,
- current_user: current_user,
- merge_request: merge_request,
- user: user
- )
- end
-
- let(:result) { service.execute }
-
- before do
- allow(SystemNoteService).to receive(:remove_attention_request)
-
- project.add_developer(current_user)
- project.add_developer(user)
- end
-
- describe '#execute' do
- context 'when current user cannot update merge request' do
- let(:service) do
- described_class.new(
- project: project,
- current_user: create(:user),
- merge_request: merge_request,
- user: user
- )
- end
-
- it 'returns an error' do
- expect(result[:status]).to eq :error
- end
- end
-
- context 'when user is not a reviewer nor assignee' do
- let(:service) do
- described_class.new(
- project: project,
- current_user: current_user,
- merge_request: merge_request,
- user: create(:user)
- )
- end
-
- it 'returns an error' do
- expect(result[:status]).to eq :error
- end
- end
-
- context 'when user is a reviewer' do
- before do
- reviewer.update!(state: :attention_requested)
- end
-
- it 'returns success' do
- expect(result[:status]).to eq :success
- end
-
- it 'updates reviewer state' do
- service.execute
- reviewer.reload
-
- expect(reviewer.state).to eq 'reviewed'
- end
-
- it 'creates a remove attention request system note' do
- expect(SystemNoteService)
- .to receive(:remove_attention_request)
- .with(merge_request, merge_request.project, current_user, user)
-
- service.execute
- end
-
- it_behaves_like 'invalidates attention request cache' do
- let(:users) { [user] }
- end
- end
-
- context 'when user is an assignee' do
- let(:service) do
- described_class.new(
- project: project,
- current_user: current_user,
- merge_request: merge_request,
- user: assignee_user
- )
- end
-
- before do
- assignee.update!(state: :attention_requested)
- end
-
- it 'returns success' do
- expect(result[:status]).to eq :success
- end
-
- it 'updates assignee state' do
- service.execute
- assignee.reload
-
- expect(assignee.state).to eq 'reviewed'
- end
-
- it_behaves_like 'invalidates attention request cache' do
- let(:users) { [assignee_user] }
- end
-
- it 'creates a remove attention request system note' do
- expect(SystemNoteService)
- .to receive(:remove_attention_request)
- .with(merge_request, merge_request.project, current_user, assignee_user)
-
- service.execute
- end
- end
-
- context 'when user is an assignee and reviewer at the same time' do
- let_it_be(:merge_request) { create(:merge_request, reviewers: [user], assignees: [user]) }
-
- let(:assignee) { merge_request.find_assignee(user) }
-
- let(:service) do
- described_class.new(
- project: project,
- current_user: current_user,
- merge_request: merge_request,
- user: user
- )
- end
-
- before do
- reviewer.update!(state: :attention_requested)
- assignee.update!(state: :attention_requested)
- end
-
- it 'returns success' do
- expect(result[:status]).to eq :success
- end
-
- it 'updates reviewers and assignees state' do
- service.execute
- reviewer.reload
- assignee.reload
-
- expect(reviewer.state).to eq 'reviewed'
- expect(assignee.state).to eq 'reviewed'
- end
- end
-
- context 'when state is already not attention_requested' do
- before do
- reviewer.update!(state: :reviewed)
- end
-
- it 'does not change state' do
- service.execute
- reviewer.reload
-
- expect(reviewer.state).to eq 'reviewed'
- end
-
- it 'does not create a remove attention request system note' do
- expect(SystemNoteService).not_to receive(:remove_attention_request)
-
- service.execute
- end
- end
- end
-end
diff --git a/spec/services/merge_requests/request_attention_service_spec.rb b/spec/services/merge_requests/request_attention_service_spec.rb
deleted file mode 100644
index 813a8150625..00000000000
--- a/spec/services/merge_requests/request_attention_service_spec.rb
+++ /dev/null
@@ -1,220 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe MergeRequests::RequestAttentionService do
- let_it_be(:current_user) { create(:user) }
- let_it_be(:user) { create(:user) }
- let_it_be(:assignee_user) { create(:user) }
- let_it_be(:merge_request) { create(:merge_request, reviewers: [user], assignees: [assignee_user]) }
-
- let(:reviewer) { merge_request.find_reviewer(user) }
- let(:assignee) { merge_request.find_assignee(assignee_user) }
- let(:project) { merge_request.project }
-
- let(:service) do
- described_class.new(
- project: project,
- current_user: current_user,
- merge_request: merge_request,
- user: user
- )
- end
-
- let(:result) { service.execute }
- let(:todo_svc) { instance_double('TodoService') }
- let(:notification_svc) { instance_double('NotificationService') }
-
- before do
- allow(service).to receive(:todo_service).and_return(todo_svc)
- allow(service).to receive(:notification_service).and_return(notification_svc)
- allow(todo_svc).to receive(:create_attention_requested_todo)
- allow(notification_svc).to receive_message_chain(:async, :attention_requested_of_merge_request)
- allow(SystemNoteService).to receive(:request_attention)
-
- project.add_developer(current_user)
- project.add_developer(user)
- end
-
- describe '#execute' do
- context 'when current user cannot update merge request' do
- let(:service) do
- described_class.new(
- project: project,
- current_user: create(:user),
- merge_request: merge_request,
- user: user
- )
- end
-
- it 'returns an error' do
- expect(result[:status]).to eq :error
- end
- end
-
- context 'when user is not a reviewer nor assignee' do
- let(:service) do
- described_class.new(
- project: project,
- current_user: current_user,
- merge_request: merge_request,
- user: create(:user)
- )
- end
-
- it 'returns an error' do
- expect(result[:status]).to eq :error
- end
- end
-
- context 'when user is a reviewer' do
- before do
- reviewer.update!(state: :reviewed)
- end
-
- it 'returns success' do
- expect(result[:status]).to eq :success
- end
-
- it 'updates reviewers state' do
- service.execute
- reviewer.reload
-
- expect(reviewer.state).to eq 'attention_requested'
- end
-
- it 'adds who toggled attention' do
- service.execute
- reviewer.reload
-
- expect(reviewer.updated_state_by).to eq current_user
- end
-
- it 'creates a new todo for the reviewer' do
- expect(todo_svc).to receive(:create_attention_requested_todo).with(merge_request, current_user, user)
-
- service.execute
- end
-
- it 'sends email to reviewer' do
- expect(notification_svc)
- .to receive_message_chain(:async, :attention_requested_of_merge_request)
- .with(merge_request, current_user, user)
-
- service.execute
- end
-
- it 'removes attention requested state' do
- expect(MergeRequests::RemoveAttentionRequestedService).to receive(:new)
- .with(project: project, current_user: current_user, merge_request: merge_request, user: current_user)
- .and_call_original
-
- service.execute
- end
-
- it_behaves_like 'invalidates attention request cache' do
- let(:users) { [user] }
- end
- end
-
- context 'when user is an assignee' do
- let(:service) do
- described_class.new(
- project: project,
- current_user: current_user,
- merge_request: merge_request,
- user: assignee_user
- )
- end
-
- before do
- assignee.update!(state: :reviewed)
- end
-
- it 'returns success' do
- expect(result[:status]).to eq :success
- end
-
- it 'updates assignees state' do
- service.execute
- assignee.reload
-
- expect(assignee.state).to eq 'attention_requested'
- end
-
- it 'creates a new todo for the reviewer' do
- expect(todo_svc).to receive(:create_attention_requested_todo).with(merge_request, current_user, assignee_user)
-
- service.execute
- end
-
- it 'creates a request attention system note' do
- expect(SystemNoteService)
- .to receive(:request_attention)
- .with(merge_request, merge_request.project, current_user, assignee_user)
-
- service.execute
- end
-
- it 'removes attention requested state' do
- expect(MergeRequests::RemoveAttentionRequestedService).to receive(:new)
- .with(project: project, current_user: current_user, merge_request: merge_request, user: current_user)
- .and_call_original
-
- service.execute
- end
-
- it_behaves_like 'invalidates attention request cache' do
- let(:users) { [assignee_user] }
- end
- end
-
- context 'when user is an assignee and reviewer at the same time' do
- let_it_be(:merge_request) { create(:merge_request, reviewers: [user], assignees: [user]) }
-
- let(:assignee) { merge_request.find_assignee(user) }
-
- let(:service) do
- described_class.new(
- project: project,
- current_user: current_user,
- merge_request: merge_request,
- user: user
- )
- end
-
- before do
- reviewer.update!(state: :reviewed)
- assignee.update!(state: :reviewed)
- end
-
- it 'updates reviewers and assignees state' do
- service.execute
- reviewer.reload
- assignee.reload
-
- expect(reviewer.state).to eq 'attention_requested'
- expect(assignee.state).to eq 'attention_requested'
- end
- end
-
- context 'when state is attention_requested' do
- before do
- reviewer.update!(state: :attention_requested)
- end
-
- it 'does not change state' do
- service.execute
- reviewer.reload
-
- expect(reviewer.state).to eq 'attention_requested'
- end
-
- it 'does not create a new todo for the reviewer' do
- expect(todo_svc).not_to receive(:create_attention_requested_todo).with(merge_request, current_user, user)
-
- service.execute
- end
- end
- end
-end
diff --git a/spec/services/merge_requests/toggle_attention_requested_service_spec.rb b/spec/services/merge_requests/toggle_attention_requested_service_spec.rb
deleted file mode 100644
index 20bc536b21e..00000000000
--- a/spec/services/merge_requests/toggle_attention_requested_service_spec.rb
+++ /dev/null
@@ -1,188 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe MergeRequests::ToggleAttentionRequestedService do
- let(:current_user) { create(:user) }
- let(:user) { create(:user) }
- let(:assignee_user) { create(:user) }
- let(:merge_request) { create(:merge_request, reviewers: [user], assignees: [assignee_user]) }
- let(:reviewer) { merge_request.find_reviewer(user) }
- let(:assignee) { merge_request.find_assignee(assignee_user) }
- let(:project) { merge_request.project }
- let(:service) { described_class.new(project: project, current_user: current_user, merge_request: merge_request, user: user) }
- let(:result) { service.execute }
- let(:todo_service) { spy('todo service') }
- let(:notification_service) { spy('notification service') }
-
- before do
- allow(NotificationService).to receive(:new) { notification_service }
- allow(service).to receive(:todo_service).and_return(todo_service)
- allow(service).to receive(:notification_service).and_return(notification_service)
- allow(SystemNoteService).to receive(:request_attention)
- allow(SystemNoteService).to receive(:remove_attention_request)
-
- project.add_developer(current_user)
- project.add_developer(user)
- end
-
- describe '#execute' do
- context 'invalid permissions' do
- let(:service) { described_class.new(project: project, current_user: create(:user), merge_request: merge_request, user: user) }
-
- it 'returns an error' do
- expect(result[:status]).to eq :error
- end
- end
-
- context 'reviewer does not exist' do
- let(:service) { described_class.new(project: project, current_user: current_user, merge_request: merge_request, user: create(:user)) }
-
- it 'returns an error' do
- expect(result[:status]).to eq :error
- end
- end
-
- context 'reviewer exists' do
- before do
- reviewer.update!(state: :reviewed)
- end
-
- it 'returns success' do
- expect(result[:status]).to eq :success
- end
-
- it 'updates reviewers state' do
- service.execute
- reviewer.reload
-
- expect(reviewer.state).to eq 'attention_requested'
- end
-
- it 'adds who toggled attention' do
- service.execute
- reviewer.reload
-
- expect(reviewer.updated_state_by).to eq current_user
- end
-
- it 'creates a new todo for the reviewer' do
- expect(todo_service).to receive(:create_attention_requested_todo).with(merge_request, current_user, user)
-
- service.execute
- end
-
- it 'sends email to reviewer' do
- expect(notification_service).to receive_message_chain(:async, :attention_requested_of_merge_request).with(merge_request, current_user, user)
-
- service.execute
- end
-
- it 'removes attention requested state' do
- expect(MergeRequests::RemoveAttentionRequestedService).to receive(:new)
- .with(project: project, current_user: current_user, merge_request: merge_request, user: current_user)
- .and_call_original
-
- service.execute
- end
-
- it 'invalidates cache' do
- cache_mock = double
-
- expect(cache_mock).to receive(:delete).with(['users', user.id, 'attention_requested_open_merge_requests_count'])
-
- allow(Rails).to receive(:cache).and_return(cache_mock)
-
- service.execute
- end
- end
-
- context 'assignee exists' do
- let(:service) { described_class.new(project: project, current_user: current_user, merge_request: merge_request, user: assignee_user) }
-
- before do
- assignee.update!(state: :reviewed)
- end
-
- it 'returns success' do
- expect(result[:status]).to eq :success
- end
-
- it 'updates assignees state' do
- service.execute
- assignee.reload
-
- expect(assignee.state).to eq 'attention_requested'
- end
-
- it 'creates a new todo for the reviewer' do
- expect(todo_service).to receive(:create_attention_requested_todo).with(merge_request, current_user, assignee_user)
-
- service.execute
- end
-
- it 'creates a request attention system note' do
- expect(SystemNoteService).to receive(:request_attention).with(merge_request, merge_request.project, current_user, assignee_user)
-
- service.execute
- end
-
- it 'removes attention requested state' do
- expect(MergeRequests::RemoveAttentionRequestedService).to receive(:new)
- .with(project: project, current_user: current_user, merge_request: merge_request, user: current_user)
- .and_call_original
-
- service.execute
- end
-
- it_behaves_like 'invalidates attention request cache' do
- let(:users) { [assignee_user] }
- end
- end
-
- context 'assignee is the same as reviewer' do
- let(:merge_request) { create(:merge_request, reviewers: [user], assignees: [user]) }
- let(:service) { described_class.new(project: project, current_user: current_user, merge_request: merge_request, user: user) }
- let(:assignee) { merge_request.find_assignee(user) }
-
- before do
- reviewer.update!(state: :reviewed)
- assignee.update!(state: :reviewed)
- end
-
- it 'updates reviewers and assignees state' do
- service.execute
- reviewer.reload
- assignee.reload
-
- expect(reviewer.state).to eq 'attention_requested'
- expect(assignee.state).to eq 'attention_requested'
- end
- end
-
- context 'state is attention_requested' do
- before do
- reviewer.update!(state: :attention_requested)
- end
-
- it 'toggles state to reviewed' do
- service.execute
- reviewer.reload
-
- expect(reviewer.state).to eq "reviewed"
- end
-
- it 'does not create a new todo for the reviewer' do
- expect(todo_service).not_to receive(:create_attention_requested_todo).with(merge_request, current_user, assignee_user)
-
- service.execute
- end
-
- it 'creates a remove attention request system note' do
- expect(SystemNoteService).to receive(:remove_attention_request).with(merge_request, merge_request.project, current_user, user)
-
- service.execute
- end
- end
- end
-end
diff --git a/spec/services/merge_requests/update_reviewers_service_spec.rb b/spec/services/merge_requests/update_reviewers_service_spec.rb
new file mode 100644
index 00000000000..8920141adbb
--- /dev/null
+++ b/spec/services/merge_requests/update_reviewers_service_spec.rb
@@ -0,0 +1,162 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe MergeRequests::UpdateReviewersService do
+ include AfterNextHelpers
+
+ let_it_be(:group) { create(:group, :public) }
+ let_it_be(:project) { create(:project, :private, :repository, group: group) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:user2) { create(:user) }
+ let_it_be(:user3) { create(:user) }
+
+ let_it_be_with_reload(:merge_request) do
+ create(:merge_request, :simple, :unique_branches,
+ title: 'Old title',
+ description: "FYI #{user2.to_reference}",
+ reviewer_ids: [user3.id],
+ source_project: project,
+ target_project: project,
+ author: create(:user))
+ end
+
+ before do
+ project.add_maintainer(user)
+ project.add_developer(user2)
+ project.add_developer(user3)
+ merge_request.errors.clear
+ end
+
+ let(:service) { described_class.new(project: project, current_user: user, params: opts) }
+ let(:opts) { { reviewer_ids: [user2.id] } }
+
+ describe 'execute' do
+ def set_reviewers
+ service.execute(merge_request)
+ end
+
+ def find_note(starting_with)
+ merge_request.notes.find do |note|
+ note && note.note.start_with?(starting_with)
+ end
+ end
+
+ shared_examples 'removing all reviewers' do
+ it 'removes all reviewers' do
+ expect(set_reviewers).to have_attributes(reviewers: be_empty, errors: be_none)
+ end
+ end
+
+ context 'when the parameters are valid' do
+ context 'when using sentinel values' do
+ let(:opts) { { reviewer_ids: [0] } }
+
+ it_behaves_like 'removing all reviewers'
+ end
+
+ context 'when the reviewer_ids parameter is the empty list' do
+ let(:opts) { { reviewer_ids: [] } }
+
+ it_behaves_like 'removing all reviewers'
+ end
+
+ it 'updates the MR' do
+ expect { set_reviewers }
+ .to change { merge_request.reload.reviewers }.from([user3]).to([user2])
+ .and change(merge_request, :updated_at)
+ .and change(merge_request, :updated_by).to(user)
+ end
+
+ it 'creates system note about merge_request review request' do
+ set_reviewers
+
+ note = find_note('requested review from')
+
+ expect(note).not_to be_nil
+ expect(note.note).to include "requested review from #{user2.to_reference}"
+ end
+
+ it 'creates a pending todo for new review request' do
+ set_reviewers
+
+ attributes = {
+ project: project,
+ author: user,
+ user: user2,
+ target_id: merge_request.id,
+ target_type: merge_request.class.name,
+ action: Todo::REVIEW_REQUESTED,
+ state: :pending
+ }
+
+ expect(Todo.where(attributes).count).to eq 1
+ end
+
+ it 'sends email reviewer change notifications to old and new reviewers', :sidekiq_inline, :mailer do
+ perform_enqueued_jobs do
+ set_reviewers
+ end
+
+ should_email(user2)
+ should_email(user3)
+ end
+
+ it 'updates open merge request counter for reviewers', :use_clean_rails_memory_store_caching do
+ # Cache them to ensure the cache gets invalidated on update
+ expect(user2.review_requested_open_merge_requests_count).to eq(0)
+ expect(user3.review_requested_open_merge_requests_count).to eq(1)
+
+ set_reviewers
+
+ expect(user2.review_requested_open_merge_requests_count).to eq(1)
+ expect(user3.review_requested_open_merge_requests_count).to eq(0)
+ end
+
+ it 'updates the tracking' do
+ expect(Gitlab::UsageDataCounters::MergeRequestActivityUniqueCounter)
+ .to receive(:track_users_review_requested)
+ .with(users: [user2])
+
+ set_reviewers
+ end
+
+ it 'tracks reviewers changed event' do
+ expect(Gitlab::UsageDataCounters::MergeRequestActivityUniqueCounter)
+ .to receive(:track_reviewers_changed_action).once.with(user: user)
+
+ set_reviewers
+ end
+
+ it 'calls MergeRequest::ResolveTodosService#async_execute' do
+ expect_next_instance_of(MergeRequests::ResolveTodosService, merge_request, user) do |service|
+ expect(service).to receive(:async_execute)
+ end
+
+ set_reviewers
+ end
+
+ it 'executes hooks with update action' do
+ expect(service).to receive(:execute_hooks)
+ .with(
+ merge_request,
+ 'update',
+ old_associations: {
+ reviewers: [user3]
+ }
+ )
+
+ set_reviewers
+ end
+
+ it 'does not update the reviewers if they do not have access' do
+ opts[:reviewer_ids] = [create(:user).id]
+
+ expect(set_reviewers).to have_attributes(
+ reviewers: [user3],
+ errors: be_any
+ )
+ end
+ end
+ end
+end
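
For orientation, the call shape the new UpdateReviewersService spec exercises (names taken from the spec; the project membership setup is omitted, and the return value is only assumed to expose #reviewers and #errors as the examples do):

service = MergeRequests::UpdateReviewersService.new(
  project: project,
  current_user: user,
  params: { reviewer_ids: [user2.id] } # [] or [0] clears all reviewers
)

result = service.execute(merge_request)
result.errors.any? # reviewers without project access leave the list unchanged and add errors
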
diff --git a/spec/services/merge_requests/update_service_spec.rb b/spec/services/merge_requests/update_service_spec.rb
index 212f75d853f..b7fb48718d8 100644
--- a/spec/services/merge_requests/update_service_spec.rb
+++ b/spec/services/merge_requests/update_service_spec.rb
@@ -91,7 +91,7 @@ RSpec.describe MergeRequests::UpdateService, :mailer do
context 'usage counters' do
let(:merge_request2) { create(:merge_request) }
- let(:draft_merge_request) { create(:merge_request, :draft_merge_request)}
+ let(:draft_merge_request) { create(:merge_request, :draft_merge_request) }
it 'update as expected' do
expect(Gitlab::UsageDataCounters::MergeRequestActivityUniqueCounter)
@@ -980,8 +980,8 @@ RSpec.describe MergeRequests::UpdateService, :mailer do
end
it 'creates system note about task status change' do
- note1 = find_note('marked the task **Task 1** as completed')
- note2 = find_note('marked the task **Task 2** as completed')
+ note1 = find_note('marked the checklist item **Task 1** as completed')
+ note2 = find_note('marked the checklist item **Task 2** as completed')
expect(note1).not_to be_nil
expect(note2).not_to be_nil
@@ -998,8 +998,8 @@ RSpec.describe MergeRequests::UpdateService, :mailer do
end
it 'creates system note about task status change' do
- note1 = find_note('marked the task **Task 1** as incomplete')
- note2 = find_note('marked the task **Task 2** as incomplete')
+ note1 = find_note('marked the checklist item **Task 1** as incomplete')
+ note2 = find_note('marked the checklist item **Task 2** as incomplete')
expect(note1).not_to be_nil
expect(note2).not_to be_nil
diff --git a/spec/services/milestones/transfer_service_spec.rb b/spec/services/milestones/transfer_service_spec.rb
index afbc9c7dca2..b15d90d685c 100644
--- a/spec/services/milestones/transfer_service_spec.rb
+++ b/spec/services/milestones/transfer_service_spec.rb
@@ -11,9 +11,9 @@ RSpec.describe Milestones::TransferService do
let(:new_group) { create(:group) }
let(:old_group) { create(:group) }
let(:project) { create(:project, namespace: old_group) }
- let(:group_milestone) { create(:milestone, group: old_group)}
- let(:group_milestone2) { create(:milestone, group: old_group)}
- let(:project_milestone) { create(:milestone, project: project)}
+ let(:group_milestone) { create(:milestone, group: old_group) }
+ let(:group_milestone2) { create(:milestone, group: old_group) }
+ let(:project_milestone) { create(:milestone, project: project) }
let!(:issue_with_group_milestone) { create(:issue, project: project, milestone: group_milestone) }
let!(:issue_with_project_milestone) { create(:issue, project: project, milestone: project_milestone) }
let!(:mr_with_group_milestone) { create(:merge_request, source_project: project, source_branch: 'branch-1', milestone: group_milestone) }
@@ -43,7 +43,7 @@ RSpec.describe Milestones::TransferService do
context 'when milestone is from an ancestor group' do
let(:old_group_ancestor) { create(:group) }
let(:old_group) { create(:group, parent: old_group_ancestor) }
- let(:group_milestone) { create(:milestone, group: old_group_ancestor)}
+ let(:group_milestone) { create(:milestone, group: old_group_ancestor) }
it 'recreates the missing group milestones at project level' do
expect { service.execute }.to change(project.milestones, :count).by(1)
diff --git a/spec/services/notes/build_service_spec.rb b/spec/services/notes/build_service_spec.rb
index 0e2bbcc8c66..c25895d2efa 100644
--- a/spec/services/notes/build_service_spec.rb
+++ b/spec/services/notes/build_service_spec.rb
@@ -170,7 +170,7 @@ RSpec.describe Notes::BuildService do
end
context 'when creating a new confidential comment' do
- let(:params) { { confidential: true, noteable: issue } }
+ let(:params) { { internal: true, noteable: issue } }
shared_examples 'user allowed to set comment as confidential' do
it { expect(new_note.confidential).to be_truthy }
@@ -219,6 +219,14 @@ RSpec.describe Notes::BuildService do
it_behaves_like 'user not allowed to set comment as confidential'
end
+
+ context 'when using the deprecated `confidential` parameter' do
+ let(:params) { { confidential: true, noteable: issue } }
+
+ it 'sets the note as confidential' do
+ expect(new_note.confidential).to be_truthy
+ end
+ end
end
context 'when replying to a confidential comment' do
diff --git a/spec/services/notes/copy_service_spec.rb b/spec/services/notes/copy_service_spec.rb
index fd8802e6640..f146a49e929 100644
--- a/spec/services/notes/copy_service_spec.rb
+++ b/spec/services/notes/copy_service_spec.rb
@@ -138,7 +138,7 @@ RSpec.describe Notes::CopyService do
context 'notes with upload' do
let(:uploader) { build(:file_uploader, project: from_noteable.project) }
- let(:text) { "Simple text with image: #{uploader.markdown_link} "}
+ let(:text) { "Simple text with image: #{uploader.markdown_link} " }
let!(:note) { create(:note, noteable: from_noteable, note: text, project: from_noteable.project) }
it 'rewrites note content correctly' do
@@ -146,8 +146,8 @@ RSpec.describe Notes::CopyService do
new_note = to_noteable.notes.first
aggregate_failures do
- expect(note.note).to match(/Simple text with image: #{FileUploader::MARKDOWN_PATTERN}/)
- expect(new_note.note).to match(/Simple text with image: #{FileUploader::MARKDOWN_PATTERN}/)
+ expect(note.note).to match(/Simple text with image: #{FileUploader::MARKDOWN_PATTERN}/o)
+ expect(new_note.note).to match(/Simple text with image: #{FileUploader::MARKDOWN_PATTERN}/o)
expect(note.note).not_to eq(new_note.note)
expect(note.note_html).not_to eq(new_note.note_html)
end
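
The only behavioural note here is the added /o flag: it makes Ruby interpolate the #{} inside the regexp literal once and reuse the compiled pattern, which is safe because FileUploader::MARKDOWN_PATTERN is a constant. A tiny standalone illustration, not GitLab code:

PATTERN = /\d+/

# Without /o the inner regexp is rebuilt on every call; with /o it is built once.
def match_once?(text)
  text.match?(/id: #{PATTERN}/o)
end

match_once?('id: 42') # => true
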
diff --git a/spec/services/notes/create_service_spec.rb b/spec/services/notes/create_service_spec.rb
index 53b75a3c991..37318d76586 100644
--- a/spec/services/notes/create_service_spec.rb
+++ b/spec/services/notes/create_service_spec.rb
@@ -7,37 +7,74 @@ RSpec.describe Notes::CreateService do
let_it_be(:issue) { create(:issue, project: project) }
let_it_be(:user) { create(:user) }
- let(:opts) do
- { note: 'Awesome comment', noteable_type: 'Issue', noteable_id: issue.id, confidential: true }
- end
+ let(:base_opts) { { note: 'Awesome comment', noteable_type: 'Issue', noteable_id: issue.id } }
+ let(:opts) { base_opts.merge(confidential: true) }
describe '#execute' do
+ subject(:note) { described_class.new(project, user, opts).execute }
+
before do
project.add_maintainer(user)
end
context "valid params" do
it 'returns a valid note' do
- note = described_class.new(project, user, opts).execute
-
expect(note).to be_valid
end
it 'returns a persisted note' do
- note = described_class.new(project, user, opts).execute
-
expect(note).to be_persisted
end
- it 'note has valid content' do
- note = described_class.new(project, user, opts).execute
+ context 'with internal parameter' do
+ context 'when confidential' do
+ let(:opts) { base_opts.merge(internal: true) }
+
+ it 'returns a confidential note' do
+ expect(note).to be_confidential
+ end
+ end
+
+ context 'when not confidential' do
+ let(:opts) { base_opts.merge(internal: false) }
+
+ it 'returns a non-confidential note' do
+ expect(note).not_to be_confidential
+ end
+ end
+ end
+
+ context 'with confidential parameter' do
+ context 'when confidential' do
+ let(:opts) { base_opts.merge(confidential: true) }
+
+ it 'returns a confidential note' do
+ expect(note).to be_confidential
+ end
+ end
+
+ context 'when not confidential' do
+ let(:opts) { base_opts.merge(confidential: false) }
+ it 'returns a non-confidential note' do
+ expect(note).not_to be_confidential
+ end
+ end
+ end
+
+ context 'with confidential and internal parameter set' do
+ let(:opts) { base_opts.merge(internal: true, confidential: false) }
+
+ it 'prefers the internal parameter' do
+ expect(note).to be_confidential
+ end
+ end
+
+ it 'note has valid content' do
expect(note.note).to eq(opts[:note])
end
it 'note belongs to the correct project' do
- note = described_class.new(project, user, opts).execute
-
expect(note.project).to eq(project)
end
@@ -60,8 +97,6 @@ RSpec.describe Notes::CreateService do
end
context 'issue is an incident' do
- subject { described_class.new(project, user, opts).execute }
-
let(:issue) { create(:incident, project: project) }
it_behaves_like 'an incident management tracked event', :incident_management_incident_comment do
@@ -69,20 +104,31 @@ RSpec.describe Notes::CreateService do
end
end
- it 'tracks issue comment usage data', :clean_gitlab_redis_shared_state do
- event = Gitlab::UsageDataCounters::IssueActivityUniqueCounter::ISSUE_COMMENT_ADDED
- counter = Gitlab::UsageDataCounters::HLLRedisCounter
+ describe 'event tracking', :snowplow do
+ let(:event) { Gitlab::UsageDataCounters::IssueActivityUniqueCounter::ISSUE_COMMENT_ADDED }
+ let(:execute_create_service) { described_class.new(project, user, opts).execute }
- expect(Gitlab::UsageDataCounters::IssueActivityUniqueCounter).to receive(:track_issue_comment_added_action).with(author: user).and_call_original
- expect do
- described_class.new(project, user, opts).execute
- end.to change { counter.unique_events(event_names: event, start_date: 1.day.ago, end_date: 1.day.from_now) }.by(1)
- end
+ it 'tracks issue comment usage data', :clean_gitlab_redis_shared_state do
+ counter = Gitlab::UsageDataCounters::HLLRedisCounter
+
+ expect(Gitlab::UsageDataCounters::IssueActivityUniqueCounter).to receive(:track_issue_comment_added_action)
+ .with(author: user, project: project)
+ .and_call_original
+ expect do
+ execute_create_service
+ end.to change { counter.unique_events(event_names: event, start_date: 1.day.ago, end_date: 1.day.from_now) }.by(1)
+ end
- it 'does not track merge request usage data' do
- expect(Gitlab::UsageDataCounters::MergeRequestActivityUniqueCounter).not_to receive(:track_create_comment_action)
+ it 'does not track merge request usage data' do
+ expect(Gitlab::UsageDataCounters::MergeRequestActivityUniqueCounter).not_to receive(:track_create_comment_action)
- described_class.new(project, user, opts).execute
+ execute_create_service
+ end
+
+ it_behaves_like 'issue_edit snowplow tracking' do
+ let(:property) { Gitlab::UsageDataCounters::IssueActivityUniqueCounter::ISSUE_COMMENT_ADDED }
+ subject(:service_action) { execute_create_service }
+ end
end
context 'in a merge request' do
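
The Notes::CreateService changes document the transition from the confidential flag to internal: both are accepted, and internal wins when both are given. A condensed usage sketch matching the spec above:

base = { note: 'Awesome comment', noteable_type: 'Issue', noteable_id: issue.id }

Notes::CreateService.new(project, user, base.merge(internal: true)).execute      # confidential note
Notes::CreateService.new(project, user, base.merge(confidential: true)).execute  # deprecated, still supported
Notes::CreateService.new(project, user, base.merge(internal: true, confidential: false)).execute
# => internal takes precedence, so the note is confidential
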
diff --git a/spec/services/notes/destroy_service_spec.rb b/spec/services/notes/destroy_service_spec.rb
index 55acdabef82..be95a4bb181 100644
--- a/spec/services/notes/destroy_service_spec.rb
+++ b/spec/services/notes/destroy_service_spec.rb
@@ -25,15 +25,25 @@ RSpec.describe Notes::DestroyService do
.to change { user.todos_pending_count }.from(1).to(0)
end
- it 'tracks issue comment removal usage data', :clean_gitlab_redis_shared_state do
- note = create(:note, project: project, noteable: issue)
- event = Gitlab::UsageDataCounters::IssueActivityUniqueCounter::ISSUE_COMMENT_REMOVED
- counter = Gitlab::UsageDataCounters::HLLRedisCounter
+ describe 'comment removed event tracking', :snowplow do
+ let(:property) { Gitlab::UsageDataCounters::IssueActivityUniqueCounter::ISSUE_COMMENT_REMOVED }
+ let(:note) { create(:note, project: project, noteable: issue) }
+ let(:service_action) { described_class.new(project, user).execute(note) }
+
+ it 'tracks issue comment removal usage data', :clean_gitlab_redis_shared_state do
+ counter = Gitlab::UsageDataCounters::HLLRedisCounter
+
+ expect(Gitlab::UsageDataCounters::IssueActivityUniqueCounter).to receive(:track_issue_comment_removed_action)
+ .with(author: user, project: project)
+ .and_call_original
+ expect do
+ service_action
+ end.to change { counter.unique_events(event_names: property, start_date: 1.day.ago, end_date: 1.day.from_now) }.by(1)
+ end
- expect(Gitlab::UsageDataCounters::IssueActivityUniqueCounter).to receive(:track_issue_comment_removed_action).with(author: user).and_call_original
- expect do
- described_class.new(project, user).execute(note)
- end.to change { counter.unique_events(event_names: event, start_date: 1.day.ago, end_date: 1.day.from_now) }.by(1)
+ it_behaves_like 'issue_edit snowplow tracking' do
+ subject(:execute_service_action) { service_action }
+ end
end
it 'tracks merge request usage data' do
diff --git a/spec/services/notes/update_service_spec.rb b/spec/services/notes/update_service_spec.rb
index ae7bea30944..989ca7b8df1 100644
--- a/spec/services/notes/update_service_spec.rb
+++ b/spec/services/notes/update_service_spec.rb
@@ -47,21 +47,31 @@ RSpec.describe Notes::UpdateService do
end
end
- it 'does not track usage data when params is blank' do
- expect(Gitlab::UsageDataCounters::IssueActivityUniqueCounter).not_to receive(:track_issue_comment_edited_action)
- expect(Gitlab::UsageDataCounters::MergeRequestActivityUniqueCounter).not_to receive(:track_edit_comment_action)
+ describe 'event tracking', :snowplow do
+ let(:event) { Gitlab::UsageDataCounters::IssueActivityUniqueCounter::ISSUE_COMMENT_EDITED }
- update_note({})
- end
+ it 'does not track usage data when params is blank' do
+ expect(Gitlab::UsageDataCounters::IssueActivityUniqueCounter).not_to receive(:track_issue_comment_edited_action)
+ expect(Gitlab::UsageDataCounters::MergeRequestActivityUniqueCounter).not_to receive(:track_edit_comment_action)
- it 'tracks issue usage data', :clean_gitlab_redis_shared_state do
- event = Gitlab::UsageDataCounters::IssueActivityUniqueCounter::ISSUE_COMMENT_EDITED
- counter = Gitlab::UsageDataCounters::HLLRedisCounter
+ update_note({})
+ end
- expect(Gitlab::UsageDataCounters::IssueActivityUniqueCounter).to receive(:track_issue_comment_edited_action).with(author: user).and_call_original
- expect do
- update_note(note: 'new text')
- end.to change { counter.unique_events(event_names: event, start_date: 1.day.ago, end_date: 1.day.from_now) }.by(1)
+ it 'tracks issue usage data', :clean_gitlab_redis_shared_state do
+ counter = Gitlab::UsageDataCounters::HLLRedisCounter
+
+ expect(Gitlab::UsageDataCounters::IssueActivityUniqueCounter).to receive(:track_issue_comment_edited_action)
+ .with(author: user, project: project)
+ .and_call_original
+ expect do
+ update_note(note: 'new text')
+ end.to change { counter.unique_events(event_names: event, start_date: 1.day.ago, end_date: 1.day.from_now) }.by(1)
+ end
+
+ it_behaves_like 'issue_edit snowplow tracking' do
+ let(:property) { Gitlab::UsageDataCounters::IssueActivityUniqueCounter::ISSUE_COMMENT_EDITED }
+ subject(:service_action) { update_note(note: 'new text') }
+ end
end
context 'when note text was changed' do
diff --git a/spec/services/notification_service_spec.rb b/spec/services/notification_service_spec.rb
index 98fe8a40c61..935dcef1011 100644
--- a/spec/services/notification_service_spec.rb
+++ b/spec/services/notification_service_spec.rb
@@ -2006,19 +2006,19 @@ RSpec.describe NotificationService, :mailer do
context 'participating' do
it_behaves_like 'participating by assignee notification' do
- let(:participant) { create(:user, username: 'user-participant')}
+ let(:participant) { create(:user, username: 'user-participant') }
let(:issuable) { merge_request }
let(:notification_trigger) { notification.new_merge_request(merge_request, @u_disabled) }
end
it_behaves_like 'participating by note notification' do
- let(:participant) { create(:user, username: 'user-participant')}
+ let(:participant) { create(:user, username: 'user-participant') }
let(:issuable) { merge_request }
let(:notification_trigger) { notification.new_merge_request(merge_request, @u_disabled) }
end
context 'by author' do
- let(:participant) { create(:user, username: 'user-participant')}
+ let(:participant) { create(:user, username: 'user-participant') }
before do
merge_request.author = participant
@@ -2657,45 +2657,6 @@ RSpec.describe NotificationService, :mailer do
let(:notification_trigger) { notification.review_requested_of_merge_request(merge_request, current_user, reviewer) }
end
end
-
- describe '#attention_requested_of_merge_request' do
- let_it_be(:current_user) { create(:user) }
- let_it_be(:reviewer) { create(:user) }
- let_it_be(:merge_request) { create(:merge_request, source_project: project, reviewers: [reviewer]) }
-
- it 'sends email to reviewer', :aggregate_failures do
- notification.attention_requested_of_merge_request(merge_request, current_user, reviewer)
-
- merge_request.reviewers.each { |reviewer| should_email(reviewer) }
- should_not_email(merge_request.author)
- should_not_email(@u_watcher)
- should_not_email(@u_participant_mentioned)
- should_not_email(@subscriber)
- should_not_email(@watcher_and_subscriber)
- should_not_email(@u_guest_watcher)
- should_not_email(@u_guest_custom)
- should_not_email(@u_custom_global)
- should_not_email(@unsubscriber)
- should_not_email(@u_participating)
- should_not_email(@u_disabled)
- should_not_email(@u_lazy_participant)
- end
-
- it 'adds "attention requested" reason' do
- notification.attention_requested_of_merge_request(merge_request, current_user, [reviewer])
-
- merge_request.reviewers.each do |reviewer|
- email = find_email_for(reviewer)
-
- expect(email).to have_header('X-GitLab-NotificationReason', NotificationReason::ATTENTION_REQUESTED)
- end
- end
-
- it_behaves_like 'project emails are disabled' do
- let(:notification_target) { merge_request }
- let(:notification_trigger) { notification.attention_requested_of_merge_request(merge_request, current_user, reviewer) }
- end
- end
end
describe 'Projects', :deliver_mails_inline do
diff --git a/spec/services/packages/composer/create_package_service_spec.rb b/spec/services/packages/composer/create_package_service_spec.rb
index b04a6c8382f..26429a7b5d9 100644
--- a/spec/services/packages/composer/create_package_service_spec.rb
+++ b/spec/services/packages/composer/create_package_service_spec.rb
@@ -88,7 +88,7 @@ RSpec.describe Packages::Composer::CreatePackageService do
end
context 'belonging to another project' do
- let(:other_project) { create(:project)}
+ let(:other_project) { create(:project) }
let!(:other_package) { create(:composer_package, name: package_name, version: 'dev-master', project: other_project) }
it 'fails with an error' do
diff --git a/spec/services/packages/create_dependency_service_spec.rb b/spec/services/packages/create_dependency_service_spec.rb
index 55414ea68fe..f95e21cd045 100644
--- a/spec/services/packages/create_dependency_service_spec.rb
+++ b/spec/services/packages/create_dependency_service_spec.rb
@@ -3,7 +3,7 @@ require 'spec_helper'
RSpec.describe Packages::CreateDependencyService do
describe '#execute' do
- let_it_be(:namespace) {create(:namespace)}
+ let_it_be(:namespace) { create(:namespace) }
let_it_be(:version) { '1.0.1' }
let_it_be(:package_name) { "@#{namespace.path}/my-app" }
diff --git a/spec/services/packages/debian/extract_deb_metadata_service_spec.rb b/spec/services/packages/debian/extract_deb_metadata_service_spec.rb
index ee3f3d179dc..66a9ca5f9e0 100644
--- a/spec/services/packages/debian/extract_deb_metadata_service_spec.rb
+++ b/spec/services/packages/debian/extract_deb_metadata_service_spec.rb
@@ -31,7 +31,7 @@ RSpec.describe Packages::Debian::ExtractDebMetadataService do
let(:file_name) { 'README.md' }
it 'raise error' do
- expect {subject.execute}.to raise_error(described_class::CommandFailedError, /is not a Debian format archive/i)
+ expect { subject.execute }.to raise_error(described_class::CommandFailedError, /is not a Debian format archive/i)
end
end
end
diff --git a/spec/services/packages/debian/extract_metadata_service_spec.rb b/spec/services/packages/debian/extract_metadata_service_spec.rb
index e3911dbbfe0..02c81ad1644 100644
--- a/spec/services/packages/debian/extract_metadata_service_spec.rb
+++ b/spec/services/packages/debian/extract_metadata_service_spec.rb
@@ -11,15 +11,10 @@ RSpec.describe Packages::Debian::ExtractMetadataService do
end
RSpec.shared_examples 'Test Debian ExtractMetadata Service' do |expected_file_type, expected_architecture, expected_fields|
- it "returns file_type #{expected_file_type.inspect}" do
+ it "returns file_type #{expected_file_type.inspect}, architecture #{expected_architecture.inspect} and fields #{expected_fields.nil? ? '' : 'including '}#{expected_fields.inspect}", :aggregate_failures do
expect(subject[:file_type]).to eq(expected_file_type)
- end
-
- it "returns architecture #{expected_architecture.inspect}" do
expect(subject[:architecture]).to eq(expected_architecture)
- end
- it "returns fields #{expected_fields.nil? ? '' : 'including '}#{expected_fields.inspect}" do
if expected_fields.nil?
expect(subject[:fields]).to be_nil
else
diff --git a/spec/services/packages/debian/parse_debian822_service_spec.rb b/spec/services/packages/debian/parse_debian822_service_spec.rb
index cad4e81f350..ff146fda250 100644
--- a/spec/services/packages/debian/parse_debian822_service_spec.rb
+++ b/spec/services/packages/debian/parse_debian822_service_spec.rb
@@ -102,7 +102,7 @@ RSpec.describe Packages::Debian::ParseDebian822Service do
let(:input) { ' continuation' }
it 'raise error' do
- expect {subject.execute}.to raise_error(described_class::InvalidDebian822Error, 'Parse error. Unexpected continuation line')
+ expect { subject.execute }.to raise_error(described_class::InvalidDebian822Error, 'Parse error. Unexpected continuation line')
end
end
@@ -116,7 +116,7 @@ RSpec.describe Packages::Debian::ParseDebian822Service do
end
it 'raise error' do
- expect {subject.execute}.to raise_error(described_class::InvalidDebian822Error, "Duplicate field 'Source' in section 'Package: libsample0'")
+ expect { subject.execute }.to raise_error(described_class::InvalidDebian822Error, "Duplicate field 'Source' in section 'Package: libsample0'")
end
end
@@ -128,7 +128,7 @@ RSpec.describe Packages::Debian::ParseDebian822Service do
end
it 'raise error' do
- expect {subject.execute}.to raise_error(described_class::InvalidDebian822Error, 'Parse error on line Hello')
+ expect { subject.execute }.to raise_error(described_class::InvalidDebian822Error, 'Parse error on line Hello')
end
end
@@ -142,7 +142,7 @@ RSpec.describe Packages::Debian::ParseDebian822Service do
end
it 'raise error' do
- expect {subject.execute}.to raise_error(described_class::InvalidDebian822Error, "Duplicate section 'Package: libsample0'")
+ expect { subject.execute }.to raise_error(described_class::InvalidDebian822Error, "Duplicate section 'Package: libsample0'")
end
end
end
diff --git a/spec/services/packages/debian/sign_distribution_service_spec.rb b/spec/services/packages/debian/sign_distribution_service_spec.rb
index 2aec0e50636..fc070b6e45e 100644
--- a/spec/services/packages/debian/sign_distribution_service_spec.rb
+++ b/spec/services/packages/debian/sign_distribution_service_spec.rb
@@ -30,7 +30,7 @@ RSpec.describe Packages::Debian::SignDistributionService do
end
context 'with an existing key' do
- let!(:key) { create("debian_#{container_type}_distribution_key", distribution: distribution)}
+ let!(:key) { create("debian_#{container_type}_distribution_key", distribution: distribution) }
it 'returns the content signed', :aggregate_failures do
expect(Packages::Debian::GenerateDistributionKeyService).not_to receive(:new)
diff --git a/spec/services/packages/helm/process_file_service_spec.rb b/spec/services/packages/helm/process_file_service_spec.rb
index d22c1de2335..1be0153a4a5 100644
--- a/spec/services/packages/helm/process_file_service_spec.rb
+++ b/spec/services/packages/helm/process_file_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
RSpec.describe Packages::Helm::ProcessFileService do
- let(:package) { create(:helm_package, without_package_files: true, status: 'processing')}
+ let(:package) { create(:helm_package, without_package_files: true, status: 'processing') }
let!(:package_file) { create(:helm_package_file, without_loaded_metadatum: true, package: package) }
let(:channel) { 'stable' }
let(:service) { described_class.new(channel, package_file) }
diff --git a/spec/services/packages/npm/create_package_service_spec.rb b/spec/services/packages/npm/create_package_service_spec.rb
index 5b41055397b..a3e59913918 100644
--- a/spec/services/packages/npm/create_package_service_spec.rb
+++ b/spec/services/packages/npm/create_package_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
RSpec.describe Packages::Npm::CreatePackageService do
- let(:namespace) {create(:namespace)}
+ let(:namespace) { create(:namespace) }
let(:project) { create(:project, namespace: namespace) }
let(:user) { create(:user) }
let(:version) { '1.0.1' }
@@ -129,7 +129,7 @@ RSpec.describe Packages::Npm::CreatePackageService do
end
describe 'max file size validation' do
- let(:max_file_size) { 5.bytes}
+ let(:max_file_size) { 5.bytes }
shared_examples_for 'max file size validation failure' do
it 'returns a 400 error', :aggregate_failures do
@@ -160,7 +160,7 @@ RSpec.describe Packages::Npm::CreatePackageService do
end
context "when encoded package data is padded with '='" do
- let(:max_file_size) { 4.bytes}
+ let(:max_file_size) { 4.bytes }
# 'Hello' (size = 5 bytes) => 'SGVsbG8='
let(:encoded_package_data) { 'SGVsbG8=' }
@@ -168,7 +168,7 @@ RSpec.describe Packages::Npm::CreatePackageService do
end
context "when encoded package data is padded with '=='" do
- let(:max_file_size) { 3.bytes}
+ let(:max_file_size) { 3.bytes }
# 'Hell' (size = 4 bytes) => 'SGVsbA=='
let(:encoded_package_data) { 'SGVsbA==' }
diff --git a/spec/services/packages/npm/create_tag_service_spec.rb b/spec/services/packages/npm/create_tag_service_spec.rb
index e7a784068fa..a4b07bf97cc 100644
--- a/spec/services/packages/npm/create_tag_service_spec.rb
+++ b/spec/services/packages/npm/create_tag_service_spec.rb
@@ -11,6 +11,7 @@ RSpec.describe Packages::Npm::CreateTagService do
shared_examples 'it creates the tag' do
it { expect { subject }.to change { Packages::Tag.count }.by(1) }
it { expect(subject.name).to eq(tag_name) }
+
it 'adds tag to the package' do
tag = subject
expect(package.reload.tags).to match_array([tag])
diff --git a/spec/services/packages/rubygems/dependency_resolver_service_spec.rb b/spec/services/packages/rubygems/dependency_resolver_service_spec.rb
index f23ed0e5fbc..bb84e0cd361 100644
--- a/spec/services/packages/rubygems/dependency_resolver_service_spec.rb
+++ b/spec/services/packages/rubygems/dependency_resolver_service_spec.rb
@@ -47,9 +47,9 @@ RSpec.describe Packages::Rubygems::DependencyResolverService do
end
context 'package with dependencies' do
- let(:dependency_link) { create(:packages_dependency_link, :rubygems, package: package)}
- let(:dependency_link2) { create(:packages_dependency_link, :rubygems, package: package)}
- let(:dependency_link3) { create(:packages_dependency_link, :rubygems, package: package)}
+ let(:dependency_link) { create(:packages_dependency_link, :rubygems, package: package) }
+ let(:dependency_link2) { create(:packages_dependency_link, :rubygems, package: package) }
+ let(:dependency_link3) { create(:packages_dependency_link, :rubygems, package: package) }
it 'returns a set of dependencies' do
expected_result = [{
@@ -68,11 +68,11 @@ RSpec.describe Packages::Rubygems::DependencyResolverService do
end
context 'package with multiple versions' do
- let(:dependency_link) { create(:packages_dependency_link, :rubygems, package: package)}
- let(:dependency_link2) { create(:packages_dependency_link, :rubygems, package: package)}
- let(:dependency_link3) { create(:packages_dependency_link, :rubygems, package: package)}
+ let(:dependency_link) { create(:packages_dependency_link, :rubygems, package: package) }
+ let(:dependency_link2) { create(:packages_dependency_link, :rubygems, package: package) }
+ let(:dependency_link3) { create(:packages_dependency_link, :rubygems, package: package) }
let(:package2) { create(:package, project: project, name: package.name, version: '9.9.9') }
- let(:dependency_link4) { create(:packages_dependency_link, :rubygems, package: package2)}
+ let(:dependency_link4) { create(:packages_dependency_link, :rubygems, package: package2) }
it 'returns a set of dependencies' do
expected_result = [{
diff --git a/spec/services/pages/delete_service_spec.rb b/spec/services/pages/delete_service_spec.rb
index 29d9a47c72e..8b9e72ac9b1 100644
--- a/spec/services/pages/delete_service_spec.rb
+++ b/spec/services/pages/delete_service_spec.rb
@@ -5,8 +5,8 @@ require 'spec_helper'
RSpec.describe Pages::DeleteService do
let_it_be(:admin) { create(:admin) }
- let(:project) { create(:project, path: "my.project")}
- let(:service) { described_class.new(project, admin)}
+ let(:project) { create(:project, path: "my.project") }
+ let(:service) { described_class.new(project, admin) }
before do
project.mark_pages_as_deployed
diff --git a/spec/services/pages_domains/obtain_lets_encrypt_certificate_service_spec.rb b/spec/services/pages_domains/obtain_lets_encrypt_certificate_service_spec.rb
index 79654c9b190..ecb445fa441 100644
--- a/spec/services/pages_domains/obtain_lets_encrypt_certificate_service_spec.rb
+++ b/spec/services/pages_domains/obtain_lets_encrypt_certificate_service_spec.rb
@@ -135,7 +135,7 @@ RSpec.describe PagesDomains::ObtainLetsEncryptCertificateService do
cert.add_extension ef.create_extension("authorityKeyIdentifier",
"keyid:always,issuer:always")
- cert.sign key, OpenSSL::Digest.new('SHA1')
+ cert.sign key, OpenSSL::Digest.new('SHA256')
cert.to_pem
end
diff --git a/spec/services/personal_access_tokens/revoke_service_spec.rb b/spec/services/personal_access_tokens/revoke_service_spec.rb
index a25484e218e..f16b6f00a0a 100644
--- a/spec/services/personal_access_tokens/revoke_service_spec.rb
+++ b/spec/services/personal_access_tokens/revoke_service_spec.rb
@@ -6,6 +6,7 @@ RSpec.describe PersonalAccessTokens::RevokeService do
shared_examples_for 'a successfully revoked token' do
it { expect(subject.success?).to be true }
it { expect(service.token.revoked?).to be true }
+
it 'logs the event' do
expect(Gitlab::AppLogger).to receive(:info).with(/PAT REVOCATION: revoked_by: '#{current_user.username}', revoked_for: '#{token.user.username}', token_id: '\d+'/)
diff --git a/spec/services/projects/after_rename_service_spec.rb b/spec/services/projects/after_rename_service_spec.rb
index 9dc15131bc5..edf4bbe0f7f 100644
--- a/spec/services/projects/after_rename_service_spec.rb
+++ b/spec/services/projects/after_rename_service_spec.rb
@@ -3,7 +3,6 @@
require 'spec_helper'
RSpec.describe Projects::AfterRenameService do
- let(:rugged_config) { rugged_repo(project.repository).config }
let(:legacy_storage) { Storage::LegacyProject.new(project) }
let(:hashed_storage) { Storage::Hashed.new(project) }
let!(:path_before_rename) { project.path }
@@ -71,10 +70,10 @@ RSpec.describe Projects::AfterRenameService do
end
end
- it 'updates project full path in .git/config' do
+ it 'updates project full path in gitaly' do
service_execute
- expect(rugged_config['gitlab.fullpath']).to eq(project.full_path)
+ expect(project.repository.full_path).to eq(project.full_path)
end
it 'updates storage location' do
@@ -173,10 +172,10 @@ RSpec.describe Projects::AfterRenameService do
end
end
- it 'updates project full path in .git/config' do
+ it 'updates project full path in gitaly' do
service_execute
- expect(rugged_config['gitlab.fullpath']).to eq(project.full_path)
+ expect(project.repository.full_path).to eq(project.full_path)
end
it 'updates storage location' do
diff --git a/spec/services/projects/alerting/notify_service_spec.rb b/spec/services/projects/alerting/notify_service_spec.rb
index feae8f3967c..aa2ef39bf98 100644
--- a/spec/services/projects/alerting/notify_service_spec.rb
+++ b/spec/services/projects/alerting/notify_service_spec.rb
@@ -56,6 +56,7 @@ RSpec.describe Projects::Alerting::NotifyService do
it_behaves_like 'processes new firing alert'
it_behaves_like 'properly assigns the alert properties'
+ include_examples 'handles race condition in alert creation'
it 'passes the integration to alert processing' do
expect(Gitlab::AlertManagement::Payload)
@@ -118,10 +119,10 @@ RSpec.describe Projects::Alerting::NotifyService do
end
context 'with overlong payload' do
- let(:deep_size_object) { instance_double(Gitlab::Utils::DeepSize, valid?: false) }
+ let(:payload_raw) { { 'the-payload-is-too-big' => true } }
before do
- allow(Gitlab::Utils::DeepSize).to receive(:new).and_return(deep_size_object)
+ stub_const('::Gitlab::Utils::DeepSize::DEFAULT_MAX_DEPTH', 0)
end
it_behaves_like 'alerts service responds with an error and takes no actions', :bad_request
diff --git a/spec/services/projects/container_repository/third_party/delete_tags_service_spec.rb b/spec/services/projects/container_repository/third_party/delete_tags_service_spec.rb
index 22cada7816b..4de36452684 100644
--- a/spec/services/projects/container_repository/third_party/delete_tags_service_spec.rb
+++ b/spec/services/projects/container_repository/third_party/delete_tags_service_spec.rb
@@ -58,7 +58,7 @@ RSpec.describe Projects::ContainerRepository::ThirdParty::DeleteTagsService do
stub_put_manifest_request('Ba', 500, {})
end
- it { is_expected.to eq(status: :error, message: "could not delete tags: #{tags.join(', ')}")}
+ it { is_expected.to eq(status: :error, message: "could not delete tags: #{tags.join(', ')}") }
context 'when a large list of tag updates fails' do
let(:tags) { Array.new(1000) { |i| "tag_#{i}" } }
diff --git a/spec/services/projects/create_service_spec.rb b/spec/services/projects/create_service_spec.rb
index 59dee209ff9..e112c1e2497 100644
--- a/spec/services/projects/create_service_spec.rb
+++ b/spec/services/projects/create_service_spec.rb
@@ -4,7 +4,6 @@ require 'spec_helper'
RSpec.describe Projects::CreateService, '#execute' do
include ExternalAuthorizationServiceHelpers
- include GitHelpers
let(:user) { create :user }
let(:project_name) { 'GitLab' }
@@ -254,6 +253,39 @@ RSpec.describe Projects::CreateService, '#execute' do
end
end
+ context 'user with project limit' do
+ let_it_be(:user_with_projects_limit) { create(:user, projects_limit: 0) }
+
+ let(:params) { opts.merge!(namespace_id: target_namespace.id) }
+
+ subject(:project) { create_project(user_with_projects_limit, params) }
+
+ context 'under personal namespace' do
+ let(:target_namespace) { user_with_projects_limit.namespace }
+
+ it 'cannot create a project' do
+ expect(project.errors.errors.length).to eq 1
+ expect(project.errors.messages[:limit_reached].first).to eq(_('Personal project creation is not allowed. Please contact your administrator with questions'))
+ end
+ end
+
+ context 'under group namespace' do
+ let_it_be(:group) do
+ create(:group).tap do |group|
+ group.add_owner(user_with_projects_limit)
+ end
+ end
+
+ let(:target_namespace) { group }
+
+ it 'can create a project' do
+ expect(project).to be_valid
+ expect(project).to be_saved
+ expect(project.errors.errors.length).to eq 0
+ end
+ end
+ end
+
context 'membership overrides', :sidekiq_inline do
let_it_be(:group) { create(:group, :private) }
let_it_be(:subgroup_for_projects) { create(:group, :private, parent: group) }
@@ -769,11 +801,10 @@ RSpec.describe Projects::CreateService, '#execute' do
create_project(user, opts)
end
- it 'writes project full path to .git/config' do
+ it 'writes project full path to gitaly' do
project = create_project(user, opts)
- rugged = rugged_repo(project.repository)
- expect(rugged.config['gitlab.fullpath']).to eq project.full_path
+ expect(project.repository.full_path).to eq project.full_path
end
it 'triggers PostCreationWorker' do
diff --git a/spec/services/projects/enable_deploy_key_service_spec.rb b/spec/services/projects/enable_deploy_key_service_spec.rb
index f297ec374cf..c0b3992037e 100644
--- a/spec/services/projects/enable_deploy_key_service_spec.rb
+++ b/spec/services/projects/enable_deploy_key_service_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe Projects::EnableDeployKeyService do
let(:deploy_key) { create(:deploy_key, public: true) }
let(:project) { create(:project) }
- let(:user) { project.creator}
+ let(:user) { project.creator }
let!(:params) { { key_id: deploy_key.id } }
it 'enables the key' do
diff --git a/spec/services/projects/hashed_storage/migrate_repository_service_spec.rb b/spec/services/projects/hashed_storage/migrate_repository_service_spec.rb
index d0064873972..65da1976dc2 100644
--- a/spec/services/projects/hashed_storage/migrate_repository_service_spec.rb
+++ b/spec/services/projects/hashed_storage/migrate_repository_service_spec.rb
@@ -68,12 +68,10 @@ RSpec.describe Projects::HashedStorage::MigrateRepositoryService do
service.execute
end
- it 'writes project full path to .git/config' do
+ it 'writes project full path to gitaly' do
service.execute
- rugged_config = rugged_repo(project.repository).config['gitlab.fullpath']
-
- expect(rugged_config).to eq project.full_path
+ expect(project.repository.full_path).to eq project.full_path
end
end
diff --git a/spec/services/projects/hashed_storage/rollback_repository_service_spec.rb b/spec/services/projects/hashed_storage/rollback_repository_service_spec.rb
index 23e776b72bc..385c03e6308 100644
--- a/spec/services/projects/hashed_storage/rollback_repository_service_spec.rb
+++ b/spec/services/projects/hashed_storage/rollback_repository_service_spec.rb
@@ -3,8 +3,6 @@
require 'spec_helper'
RSpec.describe Projects::HashedStorage::RollbackRepositoryService, :clean_gitlab_redis_shared_state do
- include GitHelpers
-
let(:gitlab_shell) { Gitlab::Shell.new }
let(:project) { create(:project, :repository, :wiki_repo, :design_repo, storage_version: ::Project::HASHED_STORAGE_FEATURES[:repository]) }
let(:legacy_storage) { Storage::LegacyProject.new(project) }
@@ -68,12 +66,10 @@ RSpec.describe Projects::HashedStorage::RollbackRepositoryService, :clean_gitlab
service.execute
end
- it 'writes project full path to .git/config' do
+ it 'writes project full path to gitaly' do
service.execute
- rugged_config = rugged_repo(project.repository).config['gitlab.fullpath']
-
- expect(rugged_config).to eq project.full_path
+ expect(project.repository.full_path).to eq project.full_path
end
end
diff --git a/spec/services/projects/import_export/export_service_spec.rb b/spec/services/projects/import_export/export_service_spec.rb
index 54abbc04084..285687505e9 100644
--- a/spec/services/projects/import_export/export_service_spec.rb
+++ b/spec/services/projects/import_export/export_service_spec.rb
@@ -89,7 +89,21 @@ RSpec.describe Projects::ImportExport::ExportService do
context 'when all saver services succeed' do
before do
- allow(service).to receive(:save_services).and_return(true)
+ allow(service).to receive(:save_exporters).and_return(true)
+ end
+
+ it 'logs a successful message' do
+ allow(Gitlab::ImportExport::Saver).to receive(:save).and_return(true)
+
+ expect(service.instance_variable_get(:@logger)).to receive(:info).ordered.with(
+ hash_including({ message: 'Project export started', project_id: project.id })
+ )
+
+ expect(service.instance_variable_get(:@logger)).to receive(:info).ordered.with(
+ hash_including({ message: 'Project successfully exported', project_id: project.id })
+ )
+
+ service.execute
end
it 'saves the project in the file system' do
@@ -111,6 +125,7 @@ RSpec.describe Projects::ImportExport::ExportService do
end
it 'calls the after export strategy' do
+ allow(Gitlab::ImportExport::Saver).to receive(:save).and_return(true)
expect(after_export_strategy).to receive(:execute)
service.execute(after_export_strategy)
@@ -119,7 +134,7 @@ RSpec.describe Projects::ImportExport::ExportService do
context 'when after export strategy fails' do
before do
allow(after_export_strategy).to receive(:execute).and_return(false)
- expect(Gitlab::ImportExport::Saver).to receive(:save).with(exportable: project, shared: shared).and_return(true)
+ allow(Gitlab::ImportExport::Saver).to receive(:save).and_return(true)
end
after do
@@ -140,7 +155,9 @@ RSpec.describe Projects::ImportExport::ExportService do
end
it 'notifies logger' do
- expect(service.instance_variable_get(:@logger)).to receive(:error)
+ expect(service.instance_variable_get(:@logger)).to receive(:error).with(
+ hash_including({ message: 'Project export error', project_id: project.id })
+ )
end
end
end
diff --git a/spec/services/projects/import_export/relation_export_service_spec.rb b/spec/services/projects/import_export/relation_export_service_spec.rb
new file mode 100644
index 00000000000..94f5653ee7d
--- /dev/null
+++ b/spec/services/projects/import_export/relation_export_service_spec.rb
@@ -0,0 +1,121 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Projects::ImportExport::RelationExportService do
+ using RSpec::Parameterized::TableSyntax
+
+ subject(:service) { described_class.new(relation_export, 'jid') }
+
+ let_it_be(:project_export_job) { create(:project_export_job) }
+ let_it_be(:export_path) { "#{Dir.tmpdir}/project_tree_saver_spec" }
+ let_it_be(:archive_path) { "#{Dir.tmpdir}/project_archive_spec" }
+
+ let(:relation_export) { create(:project_relation_export, relation: relation, project_export_job: project_export_job) }
+
+ before do
+ stub_uploads_object_storage(ImportExportUploader, enabled: false)
+
+ allow(project_export_job.project.import_export_shared).to receive(:export_path).and_return(export_path)
+ allow(project_export_job.project.import_export_shared).to receive(:archive_path).and_return(archive_path)
+ allow(FileUtils).to receive(:remove_entry).with(any_args).and_call_original
+ end
+
+ describe '#execute' do
+ let(:relation) { 'labels' }
+
+ it 'removes temporary paths used to export files' do
+ expect(FileUtils).to receive(:remove_entry).with(export_path)
+ expect(FileUtils).to receive(:remove_entry).with(archive_path)
+
+ service.execute
+ end
+
+ context 'when saver fails to export relation' do
+ before do
+ allow_next_instance_of(Gitlab::ImportExport::Project::RelationSaver) do |saver|
+ allow(saver).to receive(:save).and_return(false)
+ end
+ end
+
+ it 'flags export as failed' do
+ service.execute
+
+ expect(relation_export.failed?).to eq(true)
+ end
+
+ it 'logs failed message' do
+ expect_next_instance_of(Gitlab::Export::Logger) do |logger|
+ expect(logger).to receive(:error).with(
+ export_error: '',
+ message: 'Project relation export failed',
+ project_export_job_id: project_export_job.id,
+ project_id: project_export_job.project.id,
+ project_name: project_export_job.project.name
+ )
+ end
+
+ service.execute
+ end
+ end
+
+ context 'when an exception is raised' do
+ before do
+ allow_next_instance_of(Gitlab::ImportExport::Project::RelationSaver) do |saver|
+ allow(saver).to receive(:save).and_raise('Error!')
+ end
+ end
+
+ it 'flags export as failed' do
+ service.execute
+
+ expect(relation_export.failed?).to eq(true)
+ expect(relation_export.export_error).to eq('Error!')
+ end
+
+ it 'logs exception error message' do
+ expect_next_instance_of(Gitlab::Export::Logger) do |logger|
+ expect(logger).to receive(:error).with(
+ export_error: 'Error!',
+ message: 'Project relation export failed',
+ project_export_job_id: project_export_job.id,
+ project_id: project_export_job.project.id,
+ project_name: project_export_job.project.name
+ )
+ end
+
+ service.execute
+ end
+ end
+
+ describe 'relation name and saver class' do
+ where(:relation_name, :saver) do
+ Projects::ImportExport::RelationExport::UPLOADS_RELATION | Gitlab::ImportExport::UploadsSaver
+ Projects::ImportExport::RelationExport::REPOSITORY_RELATION | Gitlab::ImportExport::RepoSaver
+ Projects::ImportExport::RelationExport::WIKI_REPOSITORY_RELATION | Gitlab::ImportExport::WikiRepoSaver
+ Projects::ImportExport::RelationExport::LFS_OBJECTS_RELATION | Gitlab::ImportExport::LfsSaver
+ Projects::ImportExport::RelationExport::SNIPPETS_REPOSITORY_RELATION | Gitlab::ImportExport::SnippetsRepoSaver
+ Projects::ImportExport::RelationExport::DESIGN_REPOSITORY_RELATION | Gitlab::ImportExport::DesignRepoSaver
+ Projects::ImportExport::RelationExport::ROOT_RELATION | Gitlab::ImportExport::Project::RelationSaver
+ 'labels' | Gitlab::ImportExport::Project::RelationSaver
+ end
+
+ with_them do
+ let(:relation) { relation_name }
+
+ it 'exports relation using correct saver' do
+ expect(saver).to receive(:new).and_call_original
+
+ service.execute
+ end
+
+ it 'assigns finished status and relation file' do
+ service.execute
+
+ expect(relation_export.finished?).to eq(true)
+ expect(relation_export.upload.export_file.filename).to eq("#{relation}.tar.gz")
+ end
+ end
+ end
+ end
+end
diff --git a/spec/services/projects/lfs_pointers/lfs_download_link_list_service_spec.rb b/spec/services/projects/lfs_pointers/lfs_download_link_list_service_spec.rb
index 047ebe65dff..d472d6493c3 100644
--- a/spec/services/projects/lfs_pointers/lfs_download_link_list_service_spec.rb
+++ b/spec/services/projects/lfs_pointers/lfs_download_link_list_service_spec.rb
@@ -6,7 +6,7 @@ RSpec.describe Projects::LfsPointers::LfsDownloadLinkListService do
let(:lfs_endpoint) { "#{import_url}/info/lfs/objects/batch" }
let!(:project) { create(:project, import_url: import_url) }
let(:new_oids) { { 'oid1' => 123, 'oid2' => 125 } }
- let(:headers) { { 'X-Some-Header' => '456' }}
+ let(:headers) { { 'X-Some-Header' => '456' } }
let(:remote_uri) { URI.parse(lfs_endpoint) }
let(:request_object) { HTTParty::Request.new(Net::HTTP::Post, '/') }
diff --git a/spec/services/projects/lfs_pointers/lfs_download_service_spec.rb b/spec/services/projects/lfs_pointers/lfs_download_service_spec.rb
index 04c6349bf52..b67b4d64c1d 100644
--- a/spec/services/projects/lfs_pointers/lfs_download_service_spec.rb
+++ b/spec/services/projects/lfs_pointers/lfs_download_service_spec.rb
@@ -250,7 +250,7 @@ RSpec.describe Projects::LfsPointers::LfsDownloadService do
end
context 'that is not blocked' do
- let(:redirect_link) { "http://example.com/"}
+ let(:redirect_link) { "http://example.com/" }
before do
stub_full_request(download_link).to_return(status: 301, headers: { 'Location' => redirect_link })
diff --git a/spec/services/projects/lfs_pointers/lfs_object_download_list_service_spec.rb b/spec/services/projects/lfs_pointers/lfs_object_download_list_service_spec.rb
index 981d7027a17..adcc2b85706 100644
--- a/spec/services/projects/lfs_pointers/lfs_object_download_list_service_spec.rb
+++ b/spec/services/projects/lfs_pointers/lfs_object_download_list_service_spec.rb
@@ -3,8 +3,8 @@ require 'spec_helper'
RSpec.describe Projects::LfsPointers::LfsObjectDownloadListService do
let(:import_url) { 'http://www.gitlab.com/demo/repo.git' }
- let(:default_endpoint) { "#{import_url}/info/lfs/objects/batch"}
- let(:group) { create(:group, lfs_enabled: true)}
+ let(:default_endpoint) { "#{import_url}/info/lfs/objects/batch" }
+ let(:group) { create(:group, lfs_enabled: true) }
let!(:project) { create(:project, namespace: group, import_url: import_url, lfs_enabled: true) }
let!(:lfs_objects_project) { create_list(:lfs_objects_project, 2, project: project) }
let!(:existing_lfs_objects) { LfsObject.pluck(:oid, :size).to_h }
@@ -75,7 +75,7 @@ RSpec.describe Projects::LfsPointers::LfsObjectDownloadListService do
end
context 'when import url has credentials' do
- let(:import_url) { 'http://user:password@www.gitlab.com/demo/repo.git'}
+ let(:import_url) { 'http://user:password@www.gitlab.com/demo/repo.git' }
it 'adds the credentials to the new endpoint' do
expect(Projects::LfsPointers::LfsDownloadLinkListService)
diff --git a/spec/services/projects/participants_service_spec.rb b/spec/services/projects/participants_service_spec.rb
index 61edfd23700..fc745cd669f 100644
--- a/spec/services/projects/participants_service_spec.rb
+++ b/spec/services/projects/participants_service_spec.rb
@@ -107,7 +107,7 @@ RSpec.describe Projects::ParticipantsService do
shared_examples 'return project members' do
context 'when there is a project in group namespace' do
let_it_be(:public_group) { create(:group, :public) }
- let_it_be(:public_project) { create(:project, :public, namespace: public_group)}
+ let_it_be(:public_project) { create(:project, :public, namespace: public_group) }
let_it_be(:public_group_owner) { create(:user) }
@@ -125,9 +125,9 @@ RSpec.describe Projects::ParticipantsService do
context 'when there is a private group and a public project' do
let_it_be(:public_group) { create(:group, :public) }
let_it_be(:private_group) { create(:group, :private, :nested) }
- let_it_be(:public_project) { create(:project, :public, namespace: public_group)}
+ let_it_be(:public_project) { create(:project, :public, namespace: public_group) }
- let_it_be(:project_issue) { create(:issue, project: public_project)}
+ let_it_be(:project_issue) { create(:issue, project: public_project) }
let_it_be(:public_group_owner) { create(:user) }
let_it_be(:private_group_member) { create(:user) }
diff --git a/spec/services/projects/prometheus/alerts/notify_service_spec.rb b/spec/services/projects/prometheus/alerts/notify_service_spec.rb
index 6f760e6dbfa..7bf6dfd0fd8 100644
--- a/spec/services/projects/prometheus/alerts/notify_service_spec.rb
+++ b/spec/services/projects/prometheus/alerts/notify_service_spec.rb
@@ -177,6 +177,7 @@ RSpec.describe Projects::Prometheus::Alerts::NotifyService do
end
it { is_expected.to be_success }
+
include_examples 'does not send alert notification emails'
include_examples 'does not process incident issues'
end
@@ -187,6 +188,7 @@ RSpec.describe Projects::Prometheus::Alerts::NotifyService do
end
it { is_expected.to be_success }
+
include_examples 'does not send alert notification emails'
end
@@ -196,6 +198,7 @@ RSpec.describe Projects::Prometheus::Alerts::NotifyService do
end
it { is_expected.to be_success }
+
include_examples 'does not process incident issues'
end
end
@@ -313,11 +316,11 @@ RSpec.describe Projects::Prometheus::Alerts::NotifyService do
end
context 'when the payload is too big' do
- let(:payload) { { 'the-payload-is-too-big' => true } }
- let(:deep_size_object) { instance_double(Gitlab::Utils::DeepSize, valid?: false) }
+ let(:payload_raw) { { 'the-payload-is-too-big' => true } }
+ let(:payload) { ActionController::Parameters.new(payload_raw).permit! }
before do
- allow(Gitlab::Utils::DeepSize).to receive(:new).and_return(deep_size_object)
+ stub_const('::Gitlab::Utils::DeepSize::DEFAULT_MAX_DEPTH', 0)
end
it_behaves_like 'alerts service responds with an error and takes no actions', :bad_request
diff --git a/spec/services/projects/transfer_service_spec.rb b/spec/services/projects/transfer_service_spec.rb
index ecf9f92d74f..8f505c31c5a 100644
--- a/spec/services/projects/transfer_service_spec.rb
+++ b/spec/services/projects/transfer_service_spec.rb
@@ -3,8 +3,6 @@
require 'spec_helper'
RSpec.describe Projects::TransferService do
- include GitHelpers
-
let_it_be(:group) { create(:group) }
let_it_be(:user) { create(:user) }
let_it_be(:group_integration) { create(:integrations_slack, :group, group: group, webhook: 'http://group.slack.com') }
@@ -64,6 +62,30 @@ RSpec.describe Projects::TransferService do
expect(project.namespace).to eq(group)
end
+ context 'EventStore' do
+ let(:group) do
+ create(:group, :nested).tap { |g| g.add_owner(user) }
+ end
+
+ let(:target) do
+ create(:group, :nested).tap { |g| g.add_owner(user) }
+ end
+
+ let(:project) { create(:project, namespace: group) }
+
+ it 'publishes a ProjectTransferedEvent' do
+ expect { execute_transfer }
+ .to publish_event(Projects::ProjectTransferedEvent)
+ .with(
+ project_id: project.id,
+ old_namespace_id: group.id,
+ old_root_namespace_id: group.root_ancestor.id,
+ new_namespace_id: target.id,
+ new_root_namespace_id: target.root_ancestor.id
+ )
+ end
+ end
+
context 'when project has an associated project namespace' do
it 'keeps project namespace in sync with project' do
transfer_result = execute_transfer
@@ -178,10 +200,10 @@ RSpec.describe Projects::TransferService do
expect(project.disk_path).to start_with(group.path)
end
- it 'updates project full path in .git/config' do
+ it 'updates project full path in gitaly' do
execute_transfer
- expect(rugged_config['gitlab.fullpath']).to eq "#{group.full_path}/#{project.path}"
+ expect(project.repository.full_path).to eq "#{group.full_path}/#{project.path}"
end
it 'updates storage location' do
@@ -272,10 +294,10 @@ RSpec.describe Projects::TransferService do
expect(original_path).to eq current_path
end
- it 'rolls back project full path in .git/config' do
+ it 'rolls back project full path in gitaly' do
attempt_project_transfer
- expect(rugged_config['gitlab.fullpath']).to eq project.full_path
+ expect(project.repository.full_path).to eq project.full_path
end
it "doesn't send move notifications" do
@@ -299,6 +321,11 @@ RSpec.describe Projects::TransferService do
)
end
+ it 'does not publish a ProjectTransferedEvent' do
+ expect { attempt_project_transfer }
+ .not_to publish_event(Projects::ProjectTransferedEvent)
+ end
+
context 'when project has pending builds', :sidekiq_inline do
let!(:other_project) { create(:project) }
let!(:pending_build) { create(:ci_pending_build, project: project.reload) }
@@ -741,10 +768,6 @@ RSpec.describe Projects::TransferService do
end
end
- def rugged_config
- rugged_repo(project.repository).config
- end
-
def project_namespace_in_sync(group)
project.reload
expect(project.namespace).to eq(group)
diff --git a/spec/services/projects/update_service_spec.rb b/spec/services/projects/update_service_spec.rb
index ca838be0fa8..85d3e99109d 100644
--- a/spec/services/projects/update_service_spec.rb
+++ b/spec/services/projects/update_service_spec.rb
@@ -348,6 +348,18 @@ RSpec.describe Projects::UpdateService do
end
end
+ context 'when archiving a project' do
+ it 'publishes a ProjectTransferedEvent' do
+ expect { update_project(project, user, archived: true) }
+ .to publish_event(Projects::ProjectArchivedEvent)
+ .with(
+ project_id: project.id,
+ namespace_id: project.namespace_id,
+ root_namespace_id: project.root_namespace.id
+ )
+ end
+ end
+
context 'when changing operations feature visibility' do
let(:feature_params) { { operations_access_level: ProjectFeature::DISABLED } }
diff --git a/spec/services/projects/update_statistics_service_spec.rb b/spec/services/projects/update_statistics_service_spec.rb
index 6987185b549..1cc69e7e2fe 100644
--- a/spec/services/projects/update_statistics_service_spec.rb
+++ b/spec/services/projects/update_statistics_service_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe Projects::UpdateStatisticsService do
using RSpec::Parameterized::TableSyntax
- let(:service) { described_class.new(project, nil, statistics: statistics)}
+ let(:service) { described_class.new(project, nil, statistics: statistics) }
let(:statistics) { %w(repository_size) }
describe '#execute' do
diff --git a/spec/services/protected_branches/cache_service_spec.rb b/spec/services/protected_branches/cache_service_spec.rb
new file mode 100644
index 00000000000..4fa7553c23d
--- /dev/null
+++ b/spec/services/protected_branches/cache_service_spec.rb
@@ -0,0 +1,113 @@
+# frozen_string_literal: true
+# rubocop:disable Style/RedundantFetchBlock
+#
+require 'spec_helper'
+
+RSpec.describe ProtectedBranches::CacheService, :clean_gitlab_redis_cache do
+ subject(:service) { described_class.new(project, user) }
+
+ let_it_be(:project) { create(:project) }
+ let_it_be(:user) { project.first_owner }
+
+ let(:immediate_expiration) { 0 }
+
+ describe '#fetch' do
+ it 'caches the value' do
+ expect(service.fetch('main') { true }).to eq(true)
+ expect(service.fetch('not-found') { false }).to eq(false)
+
+ # Uses cached values
+ expect(service.fetch('main') { false }).to eq(true)
+ expect(service.fetch('not-found') { true }).to eq(false)
+ end
+
+ it 'sets expiry on the key' do
+ stub_const("#{described_class.name}::CACHE_EXPIRE_IN", immediate_expiration)
+
+ expect(service.fetch('main') { true }).to eq(true)
+ expect(service.fetch('not-found') { false }).to eq(false)
+
+ expect(service.fetch('main') { false }).to eq(false)
+ expect(service.fetch('not-found') { true }).to eq(true)
+ end
+
+ it 'does not set an expiry on the key after the hash is already created' do
+ expect(service.fetch('main') { true }).to eq(true)
+
+ stub_const("#{described_class.name}::CACHE_EXPIRE_IN", immediate_expiration)
+
+ expect(service.fetch('not-found') { false }).to eq(false)
+
+ expect(service.fetch('main') { false }).to eq(true)
+ expect(service.fetch('not-found') { true }).to eq(false)
+ end
+
+ context 'when CACHE_LIMIT is exceeded' do
+ before do
+ stub_const("#{described_class.name}::CACHE_LIMIT", 2)
+ end
+
+ it 'recreates cache' do
+ expect(service.fetch('main') { true }).to eq(true)
+ expect(service.fetch('not-found') { false }).to eq(false)
+
+ # Uses cached values
+ expect(service.fetch('main') { false }).to eq(true)
+ expect(service.fetch('not-found') { true }).to eq(false)
+
+ # Overflow
+ expect(service.fetch('new-branch') { true }).to eq(true)
+
+ # Refreshes values
+ expect(service.fetch('main') { false }).to eq(false)
+ expect(service.fetch('not-found') { true }).to eq(true)
+ end
+ end
+
+ context 'when dry_run is on' do
+ it 'does not use cached value' do
+ expect(service.fetch('main', dry_run: true) { true }).to eq(true)
+ expect(service.fetch('main', dry_run: true) { false }).to eq(false)
+ end
+
+ context 'when cache mismatch' do
+ it 'logs an error' do
+ expect(service.fetch('main', dry_run: true) { true }).to eq(true)
+
+ expect(Gitlab::AppLogger).to receive(:error).with(
+ 'class' => described_class.name,
+ 'message' => /Cache mismatch/,
+ 'project_id' => project.id,
+ 'project_path' => project.full_path
+ )
+
+ expect(service.fetch('main', dry_run: true) { false }).to eq(false)
+ end
+ end
+
+ context 'when cache matches' do
+ it 'does not log an error' do
+ expect(service.fetch('main', dry_run: true) { true }).to eq(true)
+
+ expect(Gitlab::AppLogger).not_to receive(:error)
+
+ expect(service.fetch('main', dry_run: true) { true }).to eq(true)
+ end
+ end
+ end
+ end
+
+ describe '#refresh' do
+ it 'clears cached values' do
+ expect(service.fetch('main') { true }).to eq(true)
+ expect(service.fetch('not-found') { false }).to eq(false)
+
+ service.refresh
+
+ # Recreates cache
+ expect(service.fetch('main') { false }).to eq(false)
+ expect(service.fetch('not-found') { true }).to eq(true)
+ end
+ end
+end
+# rubocop:enable Style/RedundantFetchBlock
diff --git a/spec/services/protected_branches/create_service_spec.rb b/spec/services/protected_branches/create_service_spec.rb
index 3ac42d41377..b42524e761c 100644
--- a/spec/services/protected_branches/create_service_spec.rb
+++ b/spec/services/protected_branches/create_service_spec.rb
@@ -3,7 +3,8 @@
require 'spec_helper'
RSpec.describe ProtectedBranches::CreateService do
- let(:project) { create(:project) }
+ let_it_be_with_reload(:project) { create(:project) }
+
let(:user) { project.first_owner }
let(:params) do
{
@@ -13,22 +14,28 @@ RSpec.describe ProtectedBranches::CreateService do
}
end
+ subject(:service) { described_class.new(project, user, params) }
+
describe '#execute' do
let(:name) { 'master' }
- subject(:service) { described_class.new(project, user, params) }
-
it 'creates a new protected branch' do
expect { service.execute }.to change(ProtectedBranch, :count).by(1)
expect(project.protected_branches.last.push_access_levels.map(&:access_level)).to eq([Gitlab::Access::MAINTAINER])
expect(project.protected_branches.last.merge_access_levels.map(&:access_level)).to eq([Gitlab::Access::MAINTAINER])
end
+ it 'refreshes the cache' do
+ expect_next_instance_of(ProtectedBranches::CacheService) do |cache_service|
+ expect(cache_service).to receive(:refresh)
+ end
+
+ service.execute
+ end
+
context 'when protecting a branch with a name that contains HTML tags' do
let(:name) { 'foo<b>bar<\b>' }
- subject(:service) { described_class.new(project, user, params) }
-
it 'creates a new protected branch' do
expect { service.execute }.to change(ProtectedBranch, :count).by(1)
expect(project.protected_branches.last.name).to eq(name)
@@ -52,16 +59,18 @@ RSpec.describe ProtectedBranches::CreateService do
end
context 'when a policy restricts rule creation' do
- before do
- policy = instance_double(ProtectedBranchPolicy, allowed?: false)
- expect(ProtectedBranchPolicy).to receive(:new).and_return(policy)
- end
-
it "prevents creation of the protected branch rule" do
+ disallow(:create_protected_branch, an_instance_of(ProtectedBranch))
+
expect do
service.execute
end.to raise_error(Gitlab::Access::AccessDeniedError)
end
end
end
+
+ def disallow(ability, protected_branch)
+ allow(Ability).to receive(:allowed?).and_call_original
+ allow(Ability).to receive(:allowed?).with(user, ability, protected_branch).and_return(false)
+ end
end
diff --git a/spec/services/protected_branches/destroy_service_spec.rb b/spec/services/protected_branches/destroy_service_spec.rb
index 4e55c72f312..9fa07820148 100644
--- a/spec/services/protected_branches/destroy_service_spec.rb
+++ b/spec/services/protected_branches/destroy_service_spec.rb
@@ -3,30 +3,41 @@
require 'spec_helper'
RSpec.describe ProtectedBranches::DestroyService do
- let(:protected_branch) { create(:protected_branch) }
- let(:project) { protected_branch.project }
+ let_it_be_with_reload(:project) { create(:project) }
+
+ let(:protected_branch) { create(:protected_branch, project: project) }
let(:user) { project.first_owner }
- describe '#execute' do
- subject(:service) { described_class.new(project, user) }
+ subject(:service) { described_class.new(project, user) }
+ describe '#execute' do
it 'destroys a protected branch' do
service.execute(protected_branch)
expect(protected_branch).to be_destroyed
end
- context 'when a policy restricts rule deletion' do
- before do
- policy = instance_double(ProtectedBranchPolicy, allowed?: false)
- expect(ProtectedBranchPolicy).to receive(:new).and_return(policy)
+ it 'refreshes the cache' do
+ expect_next_instance_of(ProtectedBranches::CacheService) do |cache_service|
+ expect(cache_service).to receive(:refresh)
end
+ service.execute(protected_branch)
+ end
+
+ context 'when a policy restricts rule deletion' do
it "prevents deletion of the protected branch rule" do
+ disallow(:destroy_protected_branch, protected_branch)
+
expect do
service.execute(protected_branch)
end.to raise_error(Gitlab::Access::AccessDeniedError)
end
end
end
+
+ def disallow(ability, protected_branch)
+ allow(Ability).to receive(:allowed?).and_call_original
+ allow(Ability).to receive(:allowed?).with(user, ability, protected_branch).and_return(false)
+ end
end
diff --git a/spec/services/protected_branches/update_service_spec.rb b/spec/services/protected_branches/update_service_spec.rb
index 4405af35c37..c4fe4d78070 100644
--- a/spec/services/protected_branches/update_service_spec.rb
+++ b/spec/services/protected_branches/update_service_spec.rb
@@ -3,27 +3,34 @@
require 'spec_helper'
RSpec.describe ProtectedBranches::UpdateService do
- let(:protected_branch) { create(:protected_branch) }
- let(:project) { protected_branch.project }
+ let_it_be_with_reload(:project) { create(:project) }
+
+ let(:protected_branch) { create(:protected_branch, project: project) }
let(:user) { project.first_owner }
let(:params) { { name: new_name } }
+ subject(:service) { described_class.new(project, user, params) }
+
describe '#execute' do
let(:new_name) { 'new protected branch name' }
let(:result) { service.execute(protected_branch) }
- subject(:service) { described_class.new(project, user, params) }
-
it 'updates a protected branch' do
expect(result.reload.name).to eq(params[:name])
end
+ it 'refreshes the cache' do
+ expect_next_instance_of(ProtectedBranches::CacheService) do |cache_service|
+ expect(cache_service).to receive(:refresh)
+ end
+
+ result
+ end
+
context 'when updating name of a protected branch to one that contains HTML tags' do
let(:new_name) { 'foo<b>bar<\b>' }
let(:result) { service.execute(protected_branch) }
- subject(:service) { described_class.new(project, user, params) }
-
it 'updates a protected branch' do
expect(result.reload.name).to eq(new_name)
end
@@ -37,15 +44,17 @@ RSpec.describe ProtectedBranches::UpdateService do
end
end
- context 'when a policy restricts rule creation' do
- before do
- policy = instance_double(ProtectedBranchPolicy, allowed?: false)
- expect(ProtectedBranchPolicy).to receive(:new).and_return(policy)
- end
+ context 'when a policy restricts rule update' do
+ it "prevents update of the protected branch rule" do
+ disallow(:update_protected_branch, protected_branch)
- it "prevents creation of the protected branch rule" do
expect { service.execute(protected_branch) }.to raise_error(Gitlab::Access::AccessDeniedError)
end
end
end
+
+ def disallow(ability, protected_branch)
+ allow(Ability).to receive(:allowed?).and_call_original
+ allow(Ability).to receive(:allowed?).with(user, ability, protected_branch).and_return(false)
+ end
end
diff --git a/spec/services/quick_actions/interpret_service_spec.rb b/spec/services/quick_actions/interpret_service_spec.rb
index 3f11eaa7e93..2d38d968ce4 100644
--- a/spec/services/quick_actions/interpret_service_spec.rb
+++ b/spec/services/quick_actions/interpret_service_spec.rb
@@ -810,38 +810,6 @@ RSpec.describe QuickActions::InterpretService do
end
end
- shared_examples 'attention command' do
- it 'updates reviewers attention status' do
- _, _, message = service.execute(content, issuable)
-
- expect(message).to eq("Requested attention from #{developer.to_reference}.")
-
- reviewer.reload
-
- expect(reviewer).to be_attention_requested
- end
-
- it 'supports attn alias' do
- attn_cmd = content.gsub(/attention/, 'attn')
- _, _, message = service.execute(attn_cmd, issuable)
-
- expect(message).to eq("Requested attention from #{developer.to_reference}.")
-
- reviewer.reload
-
- expect(reviewer).to be_attention_requested
- end
- end
-
- shared_examples 'remove attention command' do
- it 'updates reviewers attention status' do
- _, _, message = service.execute(content, issuable)
-
- expect(message).to eq("Removed attention from #{developer.to_reference}.")
- expect(reviewer).not_to be_attention_requested
- end
- end
-
it_behaves_like 'reopen command' do
let(:content) { '/reopen' }
let(:issuable) { issue }
@@ -1888,7 +1856,7 @@ RSpec.describe QuickActions::InterpretService do
context '/target_branch command' do
let(:non_empty_project) { create(:project, :repository) }
let(:another_merge_request) { create(:merge_request, author: developer, source_project: non_empty_project) }
- let(:service) { described_class.new(non_empty_project, developer)}
+ let(:service) { described_class.new(non_empty_project, developer) }
it 'updates target_branch if /target_branch command is executed' do
_, updates, _ = service.execute('/target_branch merge-test', merge_request)
@@ -2481,82 +2449,6 @@ RSpec.describe QuickActions::InterpretService do
expect(message).to eq('One or more contacts were successfully removed.')
end
end
-
- describe 'attention command' do
- let(:issuable) { create(:merge_request, reviewers: [developer], source_project: project) }
- let(:reviewer) { issuable.merge_request_reviewers.find_by(user_id: developer.id) }
- let(:content) { "/attention @#{developer.username}" }
-
- context 'with one user' do
- before do
- reviewer.update!(state: :reviewed)
- end
-
- it_behaves_like 'attention command'
- end
-
- context 'with no user' do
- let(:content) { "/attention" }
-
- it_behaves_like 'failed command', 'Failed to request attention because no user was found.'
- end
-
- context 'with incorrect permissions' do
- let(:service) { described_class.new(project, create(:user)) }
-
- it_behaves_like 'failed command', 'Could not apply attention command.'
- end
-
- context 'with feature flag disabled' do
- before do
- stub_feature_flags(mr_attention_requests: false)
- end
-
- it_behaves_like 'failed command', 'Could not apply attention command.'
- end
-
- context 'with an issue instead of a merge request' do
- let(:issuable) { issue }
-
- it_behaves_like 'failed command', 'Could not apply attention command.'
- end
- end
-
- describe 'remove attention command' do
- let(:issuable) { create(:merge_request, reviewers: [developer], source_project: project) }
- let(:reviewer) { issuable.merge_request_reviewers.find_by(user_id: developer.id) }
- let(:content) { "/remove_attention @#{developer.username}" }
-
- context 'with one user' do
- it_behaves_like 'remove attention command'
- end
-
- context 'with no user' do
- let(:content) { "/remove_attention" }
-
- it_behaves_like 'failed command', 'Failed to remove attention because no user was found.'
- end
-
- context 'with incorrect permissions' do
- let(:service) { described_class.new(project, create(:user)) }
-
- it_behaves_like 'failed command', 'Could not apply remove_attention command.'
- end
-
- context 'with feature flag disabled' do
- before do
- stub_feature_flags(mr_attention_requests: false)
- end
-
- it_behaves_like 'failed command', 'Could not apply remove_attention command.'
- end
-
- context 'with an issue instead of a merge request' do
- let(:issuable) { issue }
-
- it_behaves_like 'failed command', 'Could not apply remove_attention command.'
- end
- end
end
describe '#explain' do
diff --git a/spec/services/releases/create_service_spec.rb b/spec/services/releases/create_service_spec.rb
index 566d73a3b75..2421fab0eec 100644
--- a/spec/services/releases/create_service_spec.rb
+++ b/spec/services/releases/create_service_spec.rb
@@ -111,14 +111,6 @@ RSpec.describe Releases::CreateService do
expect(result[:message]).to eq("Milestone(s) not found: #{inexistent_milestone_tag}")
end
end
- end
-
- describe '#find_or_build_release' do
- it 'does not save the built release' do
- service.find_or_build_release
-
- expect(project.releases.count).to eq(0)
- end
context 'when existing milestone is passed in' do
let(:title) { 'v1.0' }
diff --git a/spec/services/releases/destroy_service_spec.rb b/spec/services/releases/destroy_service_spec.rb
index bc5bff0b31d..46550ac5bef 100644
--- a/spec/services/releases/destroy_service_spec.rb
+++ b/spec/services/releases/destroy_service_spec.rb
@@ -39,7 +39,7 @@ RSpec.describe Releases::DestroyService do
end
context 'when release is not found' do
- let!(:release) { }
+ let!(:release) {}
it 'returns an error' do
is_expected.to include(status: :error,
diff --git a/spec/services/releases/update_service_spec.rb b/spec/services/releases/update_service_spec.rb
index 932a7fab5ec..7461470a844 100644
--- a/spec/services/releases/update_service_spec.rb
+++ b/spec/services/releases/update_service_spec.rb
@@ -45,7 +45,7 @@ RSpec.describe Releases::UpdateService do
end
context 'when the release does not exist' do
- let!(:release) { }
+ let!(:release) {}
it_behaves_like 'a failed update'
end
diff --git a/spec/services/resource_access_tokens/create_service_spec.rb b/spec/services/resource_access_tokens/create_service_spec.rb
index 127948549b0..442232920f9 100644
--- a/spec/services/resource_access_tokens/create_service_spec.rb
+++ b/spec/services/resource_access_tokens/create_service_spec.rb
@@ -16,7 +16,7 @@ RSpec.describe ResourceAccessTokens::CreateService do
describe '#execute' do
shared_examples 'token creation fails' do
- let(:resource) { create(:project)}
+ let(:resource) { create(:project) }
it 'does not add the project bot as a member' do
expect { subject }.not_to change { resource.members.count }
diff --git a/spec/services/resource_events/change_labels_service_spec.rb b/spec/services/resource_events/change_labels_service_spec.rb
index c2c0a4c2126..8dc7b07e397 100644
--- a/spec/services/resource_events/change_labels_service_spec.rb
+++ b/spec/services/resource_events/change_labels_service_spec.rb
@@ -5,11 +5,40 @@ require 'spec_helper'
RSpec.describe ResourceEvents::ChangeLabelsService do
let_it_be(:project) { create(:project) }
let_it_be(:author) { create(:user) }
+ let_it_be(:issue) { create(:issue, project: project) }
+ let_it_be(:incident) { create(:incident, project: project) }
- let(:resource) { create(:issue, project: project) }
+ let(:resource) { issue }
- describe '.change_labels' do
- subject { described_class.new(resource, author).execute(added_labels: added, removed_labels: removed) }
+ describe '#execute' do
+ shared_examples 'creating timeline events' do
+ context 'when resource is not an incident' do
+ let(:resource) { issue }
+
+ it 'does not call create timeline events service' do
+ expect(IncidentManagement::TimelineEvents::CreateService).not_to receive(:change_labels)
+
+ change_labels
+ end
+ end
+
+ context 'when resource is an incident' do
+ let(:resource) { incident }
+
+ it 'calls create timeline events service with correct attributes' do
+ expect(IncidentManagement::TimelineEvents::CreateService)
+ .to receive(:change_labels)
+ .with(resource, author, added_labels: added, removed_labels: removed)
+ .and_call_original
+
+ change_labels
+ end
+ end
+ end
+
+ subject(:change_labels) do
+ described_class.new(resource, author).execute(added_labels: added, removed_labels: removed)
+ end
let_it_be(:labels) { create_list(:label, 2, project: project) }
@@ -20,9 +49,9 @@ RSpec.describe ResourceEvents::ChangeLabelsService do
end
it 'expires resource note etag cache' do
- expect_any_instance_of(Gitlab::EtagCaching::Store)
- .to receive(:touch)
- .with("/#{resource.project.namespace.to_param}/#{resource.project.to_param}/noteable/issue/#{resource.id}/notes")
+ expect_any_instance_of(Gitlab::EtagCaching::Store).to receive(:touch).with(
+ "/#{resource.project.namespace.to_param}/#{resource.project.to_param}/noteable/issue/#{resource.id}/notes"
+ )
described_class.new(resource, author).execute(added_labels: [labels[0]])
end
@@ -32,10 +61,12 @@ RSpec.describe ResourceEvents::ChangeLabelsService do
let(:removed) { [] }
it 'creates new label event' do
- expect { subject }.to change { resource.resource_label_events.count }.from(0).to(1)
+ expect { change_labels }.to change { resource.resource_label_events.count }.from(0).to(1)
expect_label_event(resource.resource_label_events.first, labels[0], 'add')
end
+
+ it_behaves_like 'creating timeline events'
end
context 'when removing a label' do
@@ -43,10 +74,12 @@ RSpec.describe ResourceEvents::ChangeLabelsService do
let(:removed) { [labels[1]] }
it 'creates new label event' do
- expect { subject }.to change { resource.resource_label_events.count }.from(0).to(1)
+ expect { change_labels }.to change { resource.resource_label_events.count }.from(0).to(1)
expect_label_event(resource.resource_label_events.first, labels[1], 'remove')
end
+
+ it_behaves_like 'creating timeline events'
end
context 'when both adding and removing labels' do
@@ -55,8 +88,10 @@ RSpec.describe ResourceEvents::ChangeLabelsService do
it 'creates all label events in a single query' do
expect(ApplicationRecord).to receive(:legacy_bulk_insert).once.and_call_original
- expect { subject }.to change { resource.resource_label_events.count }.from(0).to(2)
+ expect { change_labels }.to change { resource.resource_label_events.count }.from(0).to(2)
end
+
+ it_behaves_like 'creating timeline events'
end
describe 'usage data' do
@@ -67,7 +102,7 @@ RSpec.describe ResourceEvents::ChangeLabelsService do
it 'tracks changed labels' do
expect(Gitlab::UsageDataCounters::IssueActivityUniqueCounter).to receive(:track_issue_label_changed_action)
- subject
+ change_labels
end
end
@@ -75,9 +110,10 @@ RSpec.describe ResourceEvents::ChangeLabelsService do
let(:resource) { create(:merge_request, source_project: project) }
it 'does not track changed labels' do
- expect(Gitlab::UsageDataCounters::IssueActivityUniqueCounter).not_to receive(:track_issue_label_changed_action)
+ expect(Gitlab::UsageDataCounters::IssueActivityUniqueCounter)
+ .not_to receive(:track_issue_label_changed_action)
- subject
+ change_labels
end
end
end
diff --git a/spec/services/search/group_service_spec.rb b/spec/services/search/group_service_spec.rb
index 7beeec98b23..152d0700cc1 100644
--- a/spec/services/search/group_service_spec.rb
+++ b/spec/services/search/group_service_spec.rb
@@ -11,7 +11,7 @@ RSpec.describe Search::GroupService do
# These projects shouldn't be found
let!(:outside_project) { create(:project, :public, name: "Outside #{term}") }
- let!(:private_project) { create(:project, :private, namespace: nested_group, name: "Private #{term}" )}
+ let!(:private_project) { create(:project, :private, namespace: nested_group, name: "Private #{term}" ) }
let!(:other_project) { create(:project, :public, namespace: nested_group, name: term.reverse) }
# These projects should be found
diff --git a/spec/services/security/ci_configuration/sast_parser_service_spec.rb b/spec/services/security/ci_configuration/sast_parser_service_spec.rb
index 4346d0a9e07..1fd196cdcee 100644
--- a/spec/services/security/ci_configuration/sast_parser_service_spec.rb
+++ b/spec/services/security/ci_configuration/sast_parser_service_spec.rb
@@ -16,6 +16,7 @@ RSpec.describe Security::CiConfiguration::SastParserService do
let(:bandit) { configuration['analyzers'][0] }
let(:brakeman) { configuration['analyzers'][1] }
let(:sast_brakeman_level) { brakeman['variables'][0] }
+ let(:secure_analyzers_prefix) { '$CI_TEMPLATE_REGISTRY_HOST/security-products' }
it 'parses the configuration for SAST' do
expect(secure_analyzers['default_value']).to eql(secure_analyzers_prefix)
diff --git a/spec/services/snippets/update_service_spec.rb b/spec/services/snippets/update_service_spec.rb
index f61d33e2436..67cc258b4b6 100644
--- a/spec/services/snippets/update_service_spec.rb
+++ b/spec/services/snippets/update_service_spec.rb
@@ -140,7 +140,7 @@ RSpec.describe Snippets::UpdateService do
context 'when snippet_actions param is used' do
let(:file_path) { 'CHANGELOG' }
- let(:created_file_path) { 'New file'}
+ let(:created_file_path) { 'New file' }
let(:content) { 'foobar' }
let(:snippet_actions) { [{ action: :move, previous_path: snippet.file_name, file_path: file_path }, { action: :create, file_path: created_file_path, content: content }] }
let(:base_opts) do
diff --git a/spec/services/suggestions/apply_service_spec.rb b/spec/services/suggestions/apply_service_spec.rb
index 6052882813e..e34324d5fe2 100644
--- a/spec/services/suggestions/apply_service_spec.rb
+++ b/spec/services/suggestions/apply_service_spec.rb
@@ -359,7 +359,7 @@ RSpec.describe Suggestions::ApplyService do
end
context 'multiple suggestions' do
- let(:author_emails) { suggestions.map {|s| s.note.author.commit_email_or_default } }
+ let(:author_emails) { suggestions.map { |s| s.note.author.commit_email_or_default } }
let(:first_author) { suggestion.note.author }
let(:commit) { project.repository.commit }
diff --git a/spec/services/system_note_service_spec.rb b/spec/services/system_note_service_spec.rb
index 741d136b9a0..a192fae27db 100644
--- a/spec/services/system_note_service_spec.rb
+++ b/spec/services/system_note_service_spec.rb
@@ -134,15 +134,15 @@ RSpec.describe SystemNoteService do
end
end
- describe '.change_due_date' do
- let(:due_date) { double }
+ describe '.change_start_date_or_due_date' do
+ let(:changed_dates) { double }
it 'calls TimeTrackingService' do
expect_next_instance_of(::SystemNotes::TimeTrackingService) do |service|
- expect(service).to receive(:change_due_date).with(due_date)
+ expect(service).to receive(:change_start_date_or_due_date).with(changed_dates)
end
- described_class.change_due_date(noteable, project, author, due_date)
+ described_class.change_start_date_or_due_date(noteable, project, author, changed_dates)
end
end
@@ -159,30 +159,6 @@ RSpec.describe SystemNoteService do
end
end
- describe '.request_attention' do
- let(:user) { double }
-
- it 'calls IssuableService' do
- expect_next_instance_of(::SystemNotes::IssuablesService) do |service|
- expect(service).to receive(:request_attention).with(user)
- end
-
- described_class.request_attention(noteable, project, author, user)
- end
- end
-
- describe '.remove_attention_request' do
- let(:user) { double }
-
- it 'calls IssuableService' do
- expect_next_instance_of(::SystemNotes::IssuablesService) do |service|
- expect(service).to receive(:remove_attention_request).with(user)
- end
-
- described_class.remove_attention_request(noteable, project, author, user)
- end
- end
-
describe '.merge_when_pipeline_succeeds' do
it 'calls MergeRequestsService' do
sha = double
@@ -375,13 +351,14 @@ RSpec.describe SystemNoteService do
describe '.noteable_cloned' do
let(:noteable_ref) { double }
let(:direction) { double }
+ let(:created_at) { double }
it 'calls IssuableService' do
expect_next_instance_of(::SystemNotes::IssuablesService) do |service|
- expect(service).to receive(:noteable_cloned).with(noteable_ref, direction)
+ expect(service).to receive(:noteable_cloned).with(noteable_ref, direction, created_at: created_at)
end
- described_class.noteable_cloned(double, double, noteable_ref, double, direction: direction)
+ described_class.noteable_cloned(double, double, noteable_ref, double, direction: direction, created_at: created_at)
end
end
@@ -431,9 +408,22 @@ RSpec.describe SystemNoteService do
end
end
+ describe '.created_timelog' do
+ let(:issue) { create(:issue, project: project) }
+ let(:timelog) { create(:timelog, user: author, issue: issue, time_spent: 1800) }
+
+ it 'calls TimeTrackingService' do
+ expect_next_instance_of(::SystemNotes::TimeTrackingService) do |service|
+ expect(service).to receive(:created_timelog)
+ end
+
+ described_class.created_timelog(noteable, project, author, timelog)
+ end
+ end
+
describe '.remove_timelog' do
let(:issue) { create(:issue, project: project) }
- let(:timelog) { create(:timelog, user: author, issue: issue, time_spent: 1800)}
+ let(:timelog) { create(:timelog, user: author, issue: issue, time_spent: 1800) }
it 'calls TimeTrackingService' do
expect_next_instance_of(::SystemNotes::TimeTrackingService) do |service|
@@ -742,4 +732,38 @@ RSpec.describe SystemNoteService do
described_class.delete_timeline_event(noteable, author)
end
end
+
+ describe '.relate_work_item' do
+ let(:work_item) { double('work_item', issue_type: :task) }
+ let(:noteable) { double }
+
+ before do
+ allow(noteable).to receive(:project).and_return(double)
+ end
+
+ it 'calls IssuableService' do
+ expect_next_instance_of(::SystemNotes::IssuablesService) do |service|
+ expect(service).to receive(:hierarchy_changed).with(work_item, 'relate')
+ end
+
+ described_class.relate_work_item(noteable, work_item, double)
+ end
+ end
+
+ describe '.unrelate_work_item' do
+ let(:work_item) { double('work_item', issue_type: :task) }
+ let(:noteable) { double }
+
+ before do
+ allow(noteable).to receive(:project).and_return(double)
+ end
+
+ it 'calls IssuableService' do
+ expect_next_instance_of(::SystemNotes::IssuablesService) do |service|
+ expect(service).to receive(:hierarchy_changed).with(work_item, 'unrelate')
+ end
+
+ described_class.unrelate_work_item(noteable, work_item, double)
+ end
+ end
end
diff --git a/spec/services/system_notes/issuables_service_spec.rb b/spec/services/system_notes/issuables_service_spec.rb
index 5bc7ea82976..b2ccd9dba52 100644
--- a/spec/services/system_notes/issuables_service_spec.rb
+++ b/spec/services/system_notes/issuables_service_spec.rb
@@ -247,42 +247,6 @@ RSpec.describe ::SystemNotes::IssuablesService do
end
end
- describe '#request_attention' do
- subject { service.request_attention(user) }
-
- let(:user) { create(:user) }
-
- it_behaves_like 'a system note' do
- let(:action) { 'attention_requested' }
- end
-
- context 'when attention requested' do
- it_behaves_like 'a note with overridable created_at'
-
- it 'sets the note text' do
- expect(subject.note).to eq "requested attention from @#{user.username}"
- end
- end
- end
-
- describe '#remove_attention_request' do
- subject { service.remove_attention_request(user) }
-
- let(:user) { create(:user) }
-
- it_behaves_like 'a system note' do
- let(:action) { 'attention_request_removed' }
- end
-
- context 'when attention request is removed' do
- it_behaves_like 'a note with overridable created_at'
-
- it 'sets the note text' do
- expect(subject.note).to eq "removed attention request from @#{user.username}"
- end
- end
- end
-
describe '#change_title' do
let(:noteable) { create(:issue, project: project, title: 'Lorem ipsum') }
@@ -559,8 +523,8 @@ RSpec.describe ::SystemNotes::IssuablesService do
let(:action) { 'task' }
end
- it "posts the 'marked the task as complete' system note" do
- expect(subject.note).to eq("marked the task **task** as completed")
+ it "posts the 'marked the checklist item as complete' system note" do
+ expect(subject.note).to eq("marked the checklist item **task** as completed")
end
end
@@ -625,8 +589,8 @@ RSpec.describe ::SystemNotes::IssuablesService do
end
describe '#noteable_cloned' do
- let(:new_project) { create(:project) }
- let(:new_noteable) { create(:issue, project: new_project) }
+ let_it_be(:new_project) { create(:project) }
+ let_it_be(:new_noteable) { create(:issue, project: new_project) }
subject do
service.noteable_cloned(new_noteable, direction)
@@ -684,6 +648,22 @@ RSpec.describe ::SystemNotes::IssuablesService do
end
end
+ context 'custom created timestamp' do
+ let(:direction) { :from }
+
+ it 'allows setting of custom created_at value' do
+ timestamp = 1.day.ago
+
+ note = service.noteable_cloned(new_noteable, direction, created_at: timestamp)
+
+ expect(note.created_at).to be_like_time(timestamp)
+ end
+
+ it 'defaults to current time when created_at is not given', :freeze_time do
+ expect(subject.created_at).to be_like_time(Time.current)
+ end
+ end
+
context 'metrics' do
context 'cloned from' do
let(:direction) { :from }
@@ -696,15 +676,20 @@ RSpec.describe ::SystemNotes::IssuablesService do
end
end
- context 'cloned to' do
+ context 'cloned to', :snowplow do
let(:direction) { :to }
it 'tracks usage' do
expect(Gitlab::UsageDataCounters::IssueActivityUniqueCounter)
- .to receive(:track_issue_cloned_action).with(author: author)
+ .to receive(:track_issue_cloned_action).with(author: author, project: project)
subject
end
+
+ it_behaves_like 'issue_edit snowplow tracking' do
+ let(:property) { Gitlab::UsageDataCounters::IssueActivityUniqueCounter::ISSUE_CLONED }
+ let(:user) { author }
+ end
end
end
end
@@ -886,4 +871,43 @@ RSpec.describe ::SystemNotes::IssuablesService do
it { expect(subject.note).to eq "changed issue type to incident" }
end
+
+ describe '#hierarchy_changed' do
+ let_it_be_with_reload(:work_item) { create(:work_item, project: project) }
+ let_it_be_with_reload(:task) { create(:work_item, :task, project: project) }
+
+ let(:service) { described_class.new(noteable: work_item, project: project, author: author) }
+
+ subject { service.hierarchy_changed(task, hierarchy_change_action) }
+
+ context 'when task is added as a child' do
+ let(:hierarchy_change_action) { 'relate' }
+
+ it_behaves_like 'a system note' do
+ let(:expected_noteable) { task }
+ let(:action) { 'relate_to_parent' }
+ end
+
+ it 'sets the correct note text' do
+ expect { subject }.to change { Note.system.count }.by(2)
+ expect(work_item.notes.last.note).to eq("added ##{task.iid} as child task")
+ expect(task.notes.last.note).to eq("added ##{work_item.iid} as parent issue")
+ end
+ end
+
+ context 'when child task is removed' do
+ let(:hierarchy_change_action) { 'unrelate' }
+
+ it_behaves_like 'a system note' do
+ let(:expected_noteable) { task }
+ let(:action) { 'unrelate_from_parent' }
+ end
+
+ it 'sets the correct note text' do
+ expect { subject }.to change { Note.system.count }.by(2)
+ expect(work_item.notes.last.note).to eq("removed child task ##{task.iid}")
+ expect(task.notes.last.note).to eq("removed parent issue ##{work_item.iid}")
+ end
+ end
+ end
end
diff --git a/spec/services/system_notes/merge_requests_service_spec.rb b/spec/services/system_notes/merge_requests_service_spec.rb
index 58d2489f878..3e66ccef106 100644
--- a/spec/services/system_notes/merge_requests_service_spec.rb
+++ b/spec/services/system_notes/merge_requests_service_spec.rb
@@ -167,8 +167,8 @@ RSpec.describe ::SystemNotes::MergeRequestsService do
end
describe '.change_branch' do
- let(:old_branch) { 'old_branch'}
- let(:new_branch) { 'new_branch'}
+ let(:old_branch) { 'old_branch' }
+ let(:new_branch) { 'new_branch' }
it_behaves_like 'a system note' do
let(:action) { 'branch' }
diff --git a/spec/services/system_notes/time_tracking_service_spec.rb b/spec/services/system_notes/time_tracking_service_spec.rb
index fdf18f4f29a..33608deaa64 100644
--- a/spec/services/system_notes/time_tracking_service_spec.rb
+++ b/spec/services/system_notes/time_tracking_service_spec.rb
@@ -3,35 +3,112 @@
require 'spec_helper'
RSpec.describe ::SystemNotes::TimeTrackingService do
- let_it_be(:author) { create(:user) }
- let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:author) { create(:user) }
+ let_it_be(:project) { create(:project, :repository) }
- describe '#change_due_date' do
- subject { described_class.new(noteable: noteable, project: project, author: author).change_due_date(due_date) }
+ describe '#change_start_date_or_due_date' do
+ let_it_be(:issue) { create(:issue, project: project) }
+ let_it_be(:work_item) { create(:work_item, project: project) }
- let(:due_date) { Date.today }
+ subject(:note) { described_class.new(noteable: noteable, project: project, author: author).change_start_date_or_due_date(changed_dates) }
- context 'when noteable is an issue' do
- let_it_be(:noteable) { create(:issue, project: project) }
+ let(:start_date) { Date.today }
+ let(:due_date) { 1.week.from_now.to_date }
+ let(:changed_dates) { { 'due_date' => [nil, due_date], 'start_date' => [nil, start_date] } }
+ shared_examples 'issuable getting date change notes' do
it_behaves_like 'a note with overridable created_at'
it_behaves_like 'a system note' do
- let(:action) { 'due_date' }
+ let(:action) { 'start_date_or_due_date' }
end
- context 'when due date added' do
- it 'sets the note text' do
- expect(subject.note).to eq "changed due date to #{due_date.to_s(:long)}"
+ context 'when both dates are added' do
+ it 'sets the correct note message' do
+ expect(note.note).to eq("changed start date to #{start_date.to_s(:long)} and changed due date to #{due_date.to_s(:long)}")
end
end
- context 'when due date removed' do
- let(:due_date) { nil }
+ context 'when both dates are removed' do
+ let(:changed_dates) { { 'due_date' => [due_date, nil], 'start_date' => [start_date, nil] } }
- it 'sets the note text' do
- expect(subject.note).to eq 'removed due date'
+ before do
+ noteable.update!(start_date: start_date, due_date: due_date)
+ end
+
+ it 'sets the correct note message' do
+ expect(note.note).to eq('removed start date and removed due date')
+ end
+ end
+
+ context 'when due date is added' do
+ let(:changed_dates) { { 'due_date' => [nil, due_date] } }
+
+ it 'sets the correct note message' do
+ expect(note.note).to eq("changed due date to #{due_date.to_s(:long)}")
+ end
+
+ it 'tracks the issue event in usage ping' do
+ expect(activity_counter_class).to receive(activity_counter_method).with(author: author)
+
+ subject
end
+
+ context 'and start date removed' do
+ let(:changed_dates) { { 'due_date' => [nil, due_date], 'start_date' => [start_date, nil] } }
+
+ it 'sets the correct note message' do
+ expect(note.note).to eq("removed start date and changed due date to #{due_date.to_s(:long)}")
+ end
+ end
+ end
+
+ context 'when start_date is added' do
+ let(:changed_dates) { { 'start_date' => [nil, start_date] } }
+
+ it 'does not track the issue event in usage ping' do
+ expect(Gitlab::UsageDataCounters::IssueActivityUniqueCounter).not_to receive(:track_issue_due_date_changed_action)
+
+ subject
+ end
+
+ it 'sets the correct note message' do
+ expect(note.note).to eq("changed start date to #{start_date.to_s(:long)}")
+ end
+
+ context 'and due date removed' do
+ let(:changed_dates) { { 'due_date' => [due_date, nil], 'start_date' => [nil, start_date] } }
+
+ it 'sets the correct note message' do
+ expect(note.note).to eq("changed start date to #{start_date.to_s(:long)} and removed due date")
+ end
+ end
+ end
+
+ context 'when no dates are changed' do
+ let(:changed_dates) { {} }
+
+ it 'does not create a note and returns nil' do
+ expect do
+ note
+ end.to not_change(Note, :count)
+
+ expect(note).to be_nil
+ end
+ end
+ end
+
+ context 'when noteable is an issue' do
+ let(:noteable) { issue }
+ let(:activity_counter_class) { Gitlab::UsageDataCounters::IssueActivityUniqueCounter }
+ let(:activity_counter_method) { :track_issue_due_date_changed_action }
+
+ it_behaves_like 'issuable getting date change notes'
+
+ it 'does not track the work item event in usage ping' do
+ expect(Gitlab::UsageDataCounters::WorkItemActivityUniqueCounter).not_to receive(:track_work_item_date_changed_action)
+
+ subject
end
it 'tracks the issue event in usage ping' do
@@ -39,13 +116,48 @@ RSpec.describe ::SystemNotes::TimeTrackingService do
subject
end
+
+ context 'when only start_date is added' do
+ let(:changed_dates) { { 'start_date' => [nil, start_date] } }
+
+ it 'does not track the issue event in usage ping' do
+ expect(activity_counter_class).not_to receive(activity_counter_method)
+
+ subject
+ end
+ end
+ end
+
+ context 'when noteable is a work item' do
+ let(:noteable) { work_item }
+ let(:activity_counter_class) { Gitlab::UsageDataCounters::WorkItemActivityUniqueCounter }
+ let(:activity_counter_method) { :track_work_item_date_changed_action }
+
+ it_behaves_like 'issuable getting date change notes'
+
+ it 'does not track the issue event in usage ping' do
+ expect(Gitlab::UsageDataCounters::IssueActivityUniqueCounter).not_to receive(:track_issue_due_date_changed_action)
+
+ subject
+ end
+
+ context 'when only start_date is added' do
+ let(:changed_dates) { { 'start_date' => [nil, start_date] } }
+
+ it 'tracks the work item event in usage ping' do
+ expect(activity_counter_class).to receive(activity_counter_method).with(author: author)
+
+ subject
+ end
+ end
end
context 'when noteable is a merge request' do
- let_it_be(:noteable) { create(:merge_request, source_project: project) }
+ let(:noteable) { create(:merge_request, source_project: project) }
it 'does not track the issue event in usage ping' do
- expect(Gitlab::UsageDataCounters::IssueActivityUniqueCounter).not_to receive(:track_issue_due_date_changed_action).with(author: author)
+ expect(Gitlab::UsageDataCounters::IssueActivityUniqueCounter).not_to receive(:track_issue_due_date_changed_action)
+ expect(Gitlab::UsageDataCounters::WorkItemActivityUniqueCounter).not_to receive(:track_work_item_date_changed_action)
subject
end
@@ -106,13 +218,37 @@ RSpec.describe ::SystemNotes::TimeTrackingService do
end
end
+ describe '#created_timelog' do
+ subject { described_class.new(noteable: noteable, project: project, author: author).created_timelog(timelog) }
+
+ context 'when the timelog has a positive time spent value' do
+ let_it_be(:noteable, reload: true) { create(:issue, project: project) }
+
+ let(:timelog) { create(:timelog, user: author, issue: noteable, time_spent: 1800, spent_at: '2022-03-30T00:00:00.000Z') }
+
+ it 'sets the note text' do
+ expect(subject.note).to eq "added 30m of time spent at 2022-03-30"
+ end
+ end
+
+ context 'when the timelog has a negative time spent value' do
+ let_it_be(:noteable, reload: true) { create(:issue, project: project) }
+
+ let(:timelog) { create(:timelog, user: author, issue: noteable, time_spent: -1800, spent_at: '2022-03-30T00:00:00.000Z') }
+
+ it 'sets the note text' do
+ expect(subject.note).to eq "subtracted 30m of time spent at 2022-03-30"
+ end
+ end
+ end
+
describe '#remove_timelog' do
subject { described_class.new(noteable: noteable, project: project, author: author).remove_timelog(timelog) }
context 'when the timelog has a positive time spent value' do
let_it_be(:noteable, reload: true) { create(:issue, project: project) }
- let(:timelog) { create(:timelog, user: author, issue: noteable, time_spent: 1800, spent_at: '2022-03-30T00:00:00.000Z')}
+ let(:timelog) { create(:timelog, user: author, issue: noteable, time_spent: 1800, spent_at: '2022-03-30T00:00:00.000Z') }
it 'sets the note text' do
expect(subject.note).to eq "deleted 30m of spent time from 2022-03-30"
@@ -122,7 +258,7 @@ RSpec.describe ::SystemNotes::TimeTrackingService do
context 'when the timelog has a negative time spent value' do
let_it_be(:noteable, reload: true) { create(:issue, project: project) }
- let(:timelog) { create(:timelog, user: author, issue: noteable, time_spent: -1800, spent_at: '2022-03-30T00:00:00.000Z')}
+ let(:timelog) { create(:timelog, user: author, issue: noteable, time_spent: -1800, spent_at: '2022-03-30T00:00:00.000Z') }
it 'sets the note text' do
expect(subject.note).to eq "deleted -30m of spent time from 2022-03-30"
diff --git a/spec/services/terraform/remote_state_handler_spec.rb b/spec/services/terraform/remote_state_handler_spec.rb
index 19c1d4109e9..369309e4d5a 100644
--- a/spec/services/terraform/remote_state_handler_spec.rb
+++ b/spec/services/terraform/remote_state_handler_spec.rb
@@ -171,7 +171,7 @@ RSpec.describe Terraform::RemoteStateHandler do
end
context 'with no lock ID (force-unlock)' do
- let(:lock_id) { }
+ let(:lock_id) {}
it 'unlocks the state' do
state = handler.unlock!
diff --git a/spec/services/timelogs/create_service_spec.rb b/spec/services/timelogs/create_service_spec.rb
new file mode 100644
index 00000000000..b5ed4a005c7
--- /dev/null
+++ b/spec/services/timelogs/create_service_spec.rb
@@ -0,0 +1,47 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Timelogs::CreateService do
+ let_it_be(:author) { create(:user) }
+ let_it_be(:project) { create(:project, :public) }
+ let_it_be(:time_spent) { 3600 }
+ let_it_be(:spent_at) { "2022-07-08" }
+ let_it_be(:summary) { "Test summary" }
+
+ let(:issuable) { nil }
+ let(:users_container) { project }
+ let(:service) { described_class.new(issuable, time_spent, spent_at, summary, user) }
+
+ describe '#execute' do
+ subject { service.execute }
+
+ context 'when issuable is an Issue' do
+ let_it_be(:issuable) { create(:issue, project: project) }
+ let_it_be(:note_noteable) { create(:issue, project: project) }
+
+ it_behaves_like 'issuable supports timelog creation service'
+ end
+
+ context 'when issuable is a MergeRequest' do
+ let_it_be(:issuable) { create(:merge_request, source_project: project, source_branch: 'branch-1') }
+ let_it_be(:note_noteable) { create(:merge_request, source_project: project, source_branch: 'branch-2') }
+
+ it_behaves_like 'issuable supports timelog creation service'
+ end
+
+ context 'when issuable is a WorkItem' do
+ let_it_be(:issuable) { create(:work_item, project: project, title: 'WorkItem-1') }
+ let_it_be(:note_noteable) { create(:work_item, project: project, title: 'WorkItem-2') }
+
+ it_behaves_like 'issuable supports timelog creation service'
+ end
+
+ context 'when issuable is an Incident' do
+ let_it_be(:issuable) { create(:incident, project: project) }
+ let_it_be(:note_noteable) { create(:incident, project: project) }
+
+ it_behaves_like 'issuable supports timelog creation service'
+ end
+ end
+end
diff --git a/spec/services/timelogs/delete_service_spec.rb b/spec/services/timelogs/delete_service_spec.rb
index c52cebdc5bf..ee1133af6b3 100644
--- a/spec/services/timelogs/delete_service_spec.rb
+++ b/spec/services/timelogs/delete_service_spec.rb
@@ -6,7 +6,7 @@ RSpec.describe Timelogs::DeleteService do
let_it_be(:author) { create(:user) }
let_it_be(:project) { create(:project, :public) }
let_it_be(:issue) { create(:issue, project: project) }
- let_it_be(:timelog) { create(:timelog, user: author, issue: issue, time_spent: 1800)}
+ let_it_be(:timelog) { create(:timelog, user: author, issue: issue, time_spent: 1800) }
let(:service) { described_class.new(timelog, user) }
@@ -21,8 +21,8 @@ RSpec.describe Timelogs::DeleteService do
end
it 'returns the removed timelog' do
- expect(subject).to be_success
- expect(subject.payload).to eq(timelog)
+ is_expected.to be_success
+ expect(subject.payload[:timelog]).to eq(timelog)
end
end
@@ -31,7 +31,7 @@ RSpec.describe Timelogs::DeleteService do
let!(:timelog) { nil }
it 'returns an error' do
- expect(subject).to be_error
+ is_expected.to be_error
expect(subject.message).to eq('Timelog doesn\'t exist or you don\'t have permission to delete it')
expect(subject.http_status).to eq(404)
end
@@ -41,7 +41,7 @@ RSpec.describe Timelogs::DeleteService do
let(:user) { create(:user) }
it 'returns an error' do
- expect(subject).to be_error
+ is_expected.to be_error
expect(subject.message).to eq('Timelog doesn\'t exist or you don\'t have permission to delete it')
expect(subject.http_status).to eq(404)
end
@@ -49,14 +49,14 @@ RSpec.describe Timelogs::DeleteService do
context 'when the timelog deletion fails' do
let(:user) { author }
- let!(:timelog) { create(:timelog, user: author, issue: issue, time_spent: 1800)}
+ let!(:timelog) { create(:timelog, user: author, issue: issue, time_spent: 1800) }
before do
allow(timelog).to receive(:destroy).and_return(false)
end
it 'returns an error' do
- expect(subject).to be_error
+ is_expected.to be_error
expect(subject.message).to eq('Failed to remove timelog')
expect(subject.http_status).to eq(400)
end
diff --git a/spec/services/todo_service_spec.rb b/spec/services/todo_service_spec.rb
index 1cb44366457..45a8268043f 100644
--- a/spec/services/todo_service_spec.rb
+++ b/spec/services/todo_service_spec.rb
@@ -207,7 +207,7 @@ RSpec.describe TodoService do
end
it_behaves_like 'an incident management tracked event', :incident_management_incident_todo do
- let(:current_user) { john_doe}
+ let(:current_user) { john_doe }
end
end
end
@@ -1139,7 +1139,7 @@ RSpec.describe TodoService do
it 'updates related todos for the user with the new_state' do
method_call
- expect(collection.all? { |todo| todo.reload.state?(new_state)}).to be_truthy
+ expect(collection.all? { |todo| todo.reload.state?(new_state) }).to be_truthy
end
if new_resolved_by
@@ -1250,17 +1250,6 @@ RSpec.describe TodoService do
end
end
- describe '#create_attention_requested_todo' do
- let(:target) { create(:merge_request, author: author, source_project: project) }
- let(:user) { create(:user) }
-
- it 'creates a todo for user' do
- service.create_attention_requested_todo(target, author, user)
-
- should_create_todo(user: user, target: target, action: Todo::ATTENTION_REQUESTED)
- end
- end
-
def should_create_todo(attributes = {})
attributes.reverse_merge!(
project: project,
diff --git a/spec/services/todos/destroy/design_service_spec.rb b/spec/services/todos/destroy/design_service_spec.rb
index 61a6718dc9d..92b25d94dc6 100644
--- a/spec/services/todos/destroy/design_service_spec.rb
+++ b/spec/services/todos/destroy/design_service_spec.rb
@@ -9,8 +9,8 @@ RSpec.describe Todos::Destroy::DesignService do
let_it_be(:design_2) { create(:design) }
let_it_be(:design_3) { create(:design) }
- let_it_be(:create_action) { create(:design_action, design: design)}
- let_it_be(:create_action_2) { create(:design_action, design: design_2)}
+ let_it_be(:create_action) { create(:design_action, design: design) }
+ let_it_be(:create_action_2) { create(:design_action, design: design_2) }
describe '#execute' do
before do
@@ -23,8 +23,8 @@ RSpec.describe Todos::Destroy::DesignService do
subject { described_class.new([design.id, design_2.id, design_3.id]).execute }
context 'when the design has been archived' do
- let_it_be(:archive_action) { create(:design_action, design: design, event: :deletion)}
- let_it_be(:archive_action_2) { create(:design_action, design: design_3, event: :deletion)}
+ let_it_be(:archive_action) { create(:design_action, design: design, event: :deletion) }
+ let_it_be(:archive_action_2) { create(:design_action, design: design_3, event: :deletion) }
it 'removes todos for that design' do
expect { subject }.to change { Todo.count }.from(4).to(1)
diff --git a/spec/services/todos/destroy/destroyed_issuable_service_spec.rb b/spec/services/todos/destroy/destroyed_issuable_service_spec.rb
index 24f74bae7c8..6d6abe06d1c 100644
--- a/spec/services/todos/destroy/destroyed_issuable_service_spec.rb
+++ b/spec/services/todos/destroy/destroyed_issuable_service_spec.rb
@@ -4,31 +4,46 @@ require 'spec_helper'
RSpec.describe Todos::Destroy::DestroyedIssuableService do
describe '#execute' do
- let_it_be(:target) { create(:merge_request) }
- let_it_be(:pending_todo) { create(:todo, :pending, project: target.project, target: target, user: create(:user)) }
- let_it_be(:done_todo) { create(:todo, :done, project: target.project, target: target, user: create(:user)) }
+ let_it_be(:user) { create(:user) }
- def execute
- described_class.new(target.id, target.class.name).execute
- end
+ subject { described_class.new(target.id, target.class.name).execute }
+
+ context 'when target is merge request' do
+ let_it_be(:target) { create(:merge_request) }
+ let_it_be(:pending_todo) { create(:todo, :pending, project: target.project, target: target, user: user) }
+ let_it_be(:done_todo) { create(:todo, :done, project: target.project, target: target, user: user) }
- it 'deletes todos for specified target ID and type' do
- control_count = ActiveRecord::QueryRecorder.new { execute }.count
+ it 'deletes todos for specified target ID and type' do
+ control_count = ActiveRecord::QueryRecorder.new { subject }.count
- # Create more todos for the target
- create(:todo, :pending, project: target.project, target: target, user: create(:user))
- create(:todo, :pending, project: target.project, target: target, user: create(:user))
- create(:todo, :done, project: target.project, target: target, user: create(:user))
- create(:todo, :done, project: target.project, target: target, user: create(:user))
+ # Create more todos for the target
+ create(:todo, :pending, project: target.project, target: target, user: user)
+ create(:todo, :pending, project: target.project, target: target, user: user)
+ create(:todo, :done, project: target.project, target: target, user: user)
+ create(:todo, :done, project: target.project, target: target, user: user)
- expect { execute }.not_to exceed_query_limit(control_count)
- expect(target.reload.todos.count).to eq(0)
+ expect { subject }.not_to exceed_query_limit(control_count)
+ end
+
+ it 'invalidates todos cache counts of todo users', :use_clean_rails_redis_caching do
+ expect { subject }
+ .to change { pending_todo.user.todos_pending_count }.from(1).to(0)
+ .and change { done_todo.user.todos_done_count }.from(1).to(0)
+ end
end
- it 'invalidates todos cache counts of todo users', :use_clean_rails_redis_caching do
- expect { execute }
- .to change { pending_todo.user.todos_pending_count }.from(1).to(0)
- .and change { done_todo.user.todos_done_count }.from(1).to(0)
+ context 'when target is a work item' do
+ let_it_be(:target) { create(:work_item) }
+ let_it_be(:todo1) { create(:todo, :pending, project: target.project, target: target, user: user) }
+ let_it_be(:todo2) { create(:todo, :done, project: target.project, target: target, user: user) }
+ # rubocop: disable Cop/AvoidBecomes
+ let_it_be(:todo3) { create(:todo, :pending, project: target.project, target: target.becomes(Issue), user: user) }
+ let_it_be(:todo4) { create(:todo, :done, project: target.project, target: target.becomes(Issue), user: user) }
+ # rubocop: enable Cop/AvoidBecomes
+
+ it 'deletes todos' do
+ expect { subject }.to change(Todo, :count).by(-4)
+ end
end
end
end
diff --git a/spec/services/topics/merge_service_spec.rb b/spec/services/topics/merge_service_spec.rb
new file mode 100644
index 00000000000..971917eb8e9
--- /dev/null
+++ b/spec/services/topics/merge_service_spec.rb
@@ -0,0 +1,60 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Topics::MergeService do
+ let_it_be(:source_topic) { create(:topic, name: 'source_topic') }
+ let_it_be(:target_topic) { create(:topic, name: 'target_topic') }
+ let_it_be(:project_1) { create(:project, :public, topic_list: source_topic.name) }
+ let_it_be(:project_2) { create(:project, :private, topic_list: source_topic.name) }
+ let_it_be(:project_3) { create(:project, :public, topic_list: target_topic.name) }
+ let_it_be(:project_4) { create(:project, :public, topic_list: [source_topic.name, target_topic.name]) }
+
+ subject { described_class.new(source_topic, target_topic).execute }
+
+ describe '#execute' do
+ it 'merges source topic into target topic' do
+ subject
+
+ expect(target_topic.projects).to contain_exactly(project_1, project_2, project_3, project_4)
+ expect { source_topic.reload }.to raise_error(ActiveRecord::RecordNotFound)
+ end
+
+ it 'refreshes counters of target topic' do
+ expect { subject }
+ .to change { target_topic.reload.total_projects_count }.by(2)
+ .and change { target_topic.reload.non_private_projects_count }.by(1)
+ end
+
+ context 'when source topic fails to delete' do
+ it 'reverts previous changes' do
+ allow(source_topic.reload).to receive(:destroy!).and_raise(ActiveRecord::RecordNotDestroyed)
+
+ expect { subject }.to raise_error(ActiveRecord::RecordNotDestroyed)
+
+ expect(source_topic.projects).to contain_exactly(project_1, project_2, project_4)
+ expect(target_topic.projects).to contain_exactly(project_3, project_4)
+ end
+ end
+
+ context 'for parameter validation' do
+ using RSpec::Parameterized::TableSyntax
+
+ subject { described_class.new(source_topic_parameter, target_topic_parameter).execute }
+
+ where(:source_topic_parameter, :target_topic_parameter, :expected_message) do
+ nil | ref(:target_topic) | 'The source topic is not a topic.'
+ ref(:source_topic) | nil | 'The target topic is not a topic.'
+ ref(:target_topic) | ref(:target_topic) | 'The source topic and the target topic are identical.' # rubocop:disable Lint/BinaryOperatorWithIdenticalOperands
+ end
+
+ with_them do
+ it 'raises correct error' do
+ expect { subject }.to raise_error(ArgumentError) do |error|
+ expect(error.message).to eq(expected_message)
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/spec/services/uploads/destroy_service_spec.rb b/spec/services/uploads/destroy_service_spec.rb
new file mode 100644
index 00000000000..bb58da231b6
--- /dev/null
+++ b/spec/services/uploads/destroy_service_spec.rb
@@ -0,0 +1,103 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Uploads::DestroyService do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:user) { create(:user) }
+ let_it_be_with_reload(:upload) { create(:upload, :issuable_upload, model: project) }
+
+ let(:filename) { File.basename(upload.path) }
+ let(:secret) { upload.secret }
+ let(:model) { project }
+ let(:service) { described_class.new(model, user) }
+
+ describe '#execute' do
+ subject { service.execute(secret, filename) }
+
+ shared_examples_for 'upload not found' do
+ it 'does not delete any upload' do
+ expect { subject }.not_to change { Upload.count }
+ end
+
+ it 'returns an error' do
+ expect(subject[:status]).to eq(:error)
+ expect(subject[:message]).to eq("The resource that you are attempting to access does not "\
+ "exist or you don't have permission to perform this action.")
+ end
+ end
+
+ context 'when user is nil' do
+ let(:user) { nil }
+
+ it_behaves_like 'upload not found'
+ end
+
+ context 'when user cannot destroy upload' do
+ before do
+ project.add_developer(user)
+ end
+
+ it_behaves_like 'upload not found'
+ end
+
+ context 'when user can destroy upload' do
+ before do
+ project.add_maintainer(user)
+ end
+
+ it 'deletes the upload' do
+ expect { subject }.to change { Upload.count }.by(-1)
+ end
+
+ it 'returns success response' do
+ expect(subject[:status]).to eq(:success)
+ expect(subject[:upload]).to eq(upload)
+ end
+
+ context 'when upload is not found' do
+ let(:filename) { 'not existing filename' }
+
+ it_behaves_like 'upload not found'
+ end
+
+ context 'when upload secret is not found' do
+ let(:secret) { 'aaaaaaaaaa' }
+
+ it_behaves_like 'upload not found'
+ end
+
+ context 'when upload secret has invalid format' do
+ let(:secret) { 'invalid' }
+
+ it_behaves_like 'upload not found'
+ end
+
+ context 'when unknown model is used' do
+ let(:model) { user }
+
+ it 'raises an error' do
+ expect { subject }.to raise_exception(ArgumentError)
+ end
+ end
+
+ context 'when upload belongs to other model' do
+ let_it_be(:upload) { create(:upload, :namespace_upload) }
+
+ it_behaves_like 'upload not found'
+ end
+
+ context 'when upload destroy fails' do
+ before do
+ allow(service).to receive(:find_upload).and_return(upload)
+ allow(upload).to receive(:destroy).and_return(false)
+ end
+
+ it 'returns error' do
+ expect(subject[:status]).to eq(:error)
+ expect(subject[:message]).to eq('Upload could not be deleted.')
+ end
+ end
+ end
+ end
+end
diff --git a/spec/services/users/create_service_spec.rb b/spec/services/users/create_service_spec.rb
index 74340bac055..f3c9701c556 100644
--- a/spec/services/users/create_service_spec.rb
+++ b/spec/services/users/create_service_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe Users::CreateService do
describe '#execute' do
+ let(:password) { User.random_password }
let(:admin_user) { create(:admin) }
context 'with an admin user' do
@@ -12,7 +13,7 @@ RSpec.describe Users::CreateService do
context 'when required parameters are provided' do
let(:params) do
- { name: 'John Doe', username: 'jduser', email: email, password: 'mydummypass' }
+ { name: 'John Doe', username: 'jduser', email: email, password: password }
end
it 'returns a persisted user' do
@@ -82,13 +83,13 @@ RSpec.describe Users::CreateService do
context 'when force_random_password parameter is true' do
let(:params) do
- { name: 'John Doe', username: 'jduser', email: 'jd@example.com', password: 'mydummypass', force_random_password: true }
+ { name: 'John Doe', username: 'jduser', email: 'jd@example.com', password: password, force_random_password: true }
end
it 'generates random password' do
user = service.execute
- expect(user.password).not_to eq 'mydummypass'
+ expect(user.password).not_to eq password
expect(user.password).to be_present
end
end
@@ -99,7 +100,7 @@ RSpec.describe Users::CreateService do
name: 'John Doe',
username: 'jduser',
email: 'jd@example.com',
- password: 'mydummypass',
+ password: password,
password_automatically_set: true
}
end
@@ -121,7 +122,7 @@ RSpec.describe Users::CreateService do
context 'when skip_confirmation parameter is true' do
let(:params) do
- { name: 'John Doe', username: 'jduser', email: 'jd@example.com', password: 'mydummypass', skip_confirmation: true }
+ { name: 'John Doe', username: 'jduser', email: 'jd@example.com', password: password, skip_confirmation: true }
end
it 'confirms the user' do
@@ -131,7 +132,7 @@ RSpec.describe Users::CreateService do
context 'when reset_password parameter is true' do
let(:params) do
- { name: 'John Doe', username: 'jduser', email: 'jd@example.com', password: 'mydummypass', reset_password: true }
+ { name: 'John Doe', username: 'jduser', email: 'jd@example.com', password: password, reset_password: true }
end
it 'resets password even if a password parameter is given' do
@@ -152,7 +153,7 @@ RSpec.describe Users::CreateService do
context 'with nil user' do
let(:params) do
- { name: 'John Doe', username: 'jduser', email: 'jd@example.com', password: 'mydummypass', skip_confirmation: true }
+ { name: 'John Doe', username: 'jduser', email: 'jd@example.com', password: password, skip_confirmation: true }
end
let(:service) { described_class.new(nil, params) }
diff --git a/spec/services/users/dismiss_namespace_callout_service_spec.rb b/spec/services/users/dismiss_namespace_callout_service_spec.rb
new file mode 100644
index 00000000000..fbcdb66c9e8
--- /dev/null
+++ b/spec/services/users/dismiss_namespace_callout_service_spec.rb
@@ -0,0 +1,24 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Users::DismissNamespaceCalloutService do
+ describe '#execute' do
+ let_it_be(:user) { create(:user) }
+
+ let(:params) { { feature_name: feature_name, namespace_id: user.namespace.id } }
+ let(:feature_name) { Users::NamespaceCallout.feature_names.each_key.first }
+
+ subject(:execute) do
+ described_class.new(
+ container: nil, current_user: user, params: params
+ ).execute
+ end
+
+ it_behaves_like 'dismissing user callout', Users::NamespaceCallout
+
+ it 'sets the namespace_id' do
+ expect(execute.namespace_id).to eq(user.namespace.id)
+ end
+ end
+end
diff --git a/spec/services/users/dismiss_project_callout_service_spec.rb b/spec/services/users/dismiss_project_callout_service_spec.rb
new file mode 100644
index 00000000000..73e50a4c37d
--- /dev/null
+++ b/spec/services/users/dismiss_project_callout_service_spec.rb
@@ -0,0 +1,25 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Users::DismissProjectCalloutService do
+ describe '#execute' do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project) }
+
+ let(:params) { { feature_name: feature_name, project_id: project.id } }
+ let(:feature_name) { Users::ProjectCallout.feature_names.each_key.first }
+
+ subject(:execute) do
+ described_class.new(
+ container: nil, current_user: user, params: params
+ ).execute
+ end
+
+ it_behaves_like 'dismissing user callout', Users::ProjectCallout
+
+ it 'sets the project_id' do
+ expect(execute.project_id).to eq(project.id)
+ end
+ end
+end
diff --git a/spec/services/users/update_service_spec.rb b/spec/services/users/update_service_spec.rb
index 52c7b54ed72..411cd7316d8 100644
--- a/spec/services/users/update_service_spec.rb
+++ b/spec/services/users/update_service_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe Users::UpdateService do
- let(:password) { 'longsecret987!' }
+ let(:password) { User.random_password }
let(:user) { create(:user, password: password, password_confirmation: password) }
describe '#execute' do
diff --git a/spec/services/web_hook_service_spec.rb b/spec/services/web_hook_service_spec.rb
index 339ffc44e4d..fed3ae7a543 100644
--- a/spec/services/web_hook_service_spec.rb
+++ b/spec/services/web_hook_service_spec.rb
@@ -190,7 +190,7 @@ RSpec.describe WebHookService, :request_store, :clean_gitlab_redis_shared_state
end
context 'when auth credentials are present' do
- let_it_be(:url) {'https://example.org'}
+ let_it_be(:url) { 'https://example.org' }
let_it_be(:project_hook) { create(:project_hook, url: 'https://demo:demo@example.org/') }
it 'uses the credentials' do
@@ -205,7 +205,7 @@ RSpec.describe WebHookService, :request_store, :clean_gitlab_redis_shared_state
end
context 'when auth credentials are partial present' do
- let_it_be(:url) {'https://example.org'}
+ let_it_be(:url) { 'https://example.org' }
let_it_be(:project_hook) { create(:project_hook, url: 'https://demo@example.org/') }
it 'uses the credentials anyways' do
diff --git a/spec/services/web_hooks/destroy_service_spec.rb b/spec/services/web_hooks/destroy_service_spec.rb
index 4d9bb18e540..ca8cb8a1b75 100644
--- a/spec/services/web_hooks/destroy_service_spec.rb
+++ b/spec/services/web_hooks/destroy_service_spec.rb
@@ -8,43 +8,54 @@ RSpec.describe WebHooks::DestroyService do
subject { described_class.new(user) }
describe '#execute' do
- %i[system_hook project_hook].each do |factory|
- context "deleting a #{factory}" do
- let!(:hook) { create(factory) } # rubocop: disable Rails/SaveBang (false-positive!)
- let!(:log) { create_list(:web_hook_log, 3, web_hook: hook) }
+ # Testing with a project hook only - for permission tests, see policy specs.
+ let!(:hook) { create(:project_hook) }
+ let!(:log) { create_list(:web_hook_log, 3, web_hook: hook) }
+
+ context 'when the user does not have permission' do
+ it 'is an error' do
+ expect(subject.execute(hook))
+ .to be_error
+ .and have_attributes(message: described_class::DENIED)
+ end
+ end
- it 'is successful' do
- expect(subject.execute(hook)).to be_success
- end
+ context 'when the user does have permission' do
+ before do
+ hook.project.add_maintainer(user)
+ end
- it 'destroys the hook' do
- expect { subject.execute(hook) }.to change(WebHook, :count).from(1).to(0)
- end
+ it 'is successful' do
+ expect(subject.execute(hook)).to be_success
+ end
- it 'does not destroy logs' do
- expect { subject.execute(hook) }.not_to change(WebHookLog, :count)
- end
+ it 'destroys the hook' do
+ expect { subject.execute(hook) }.to change(WebHook, :count).from(1).to(0)
+ end
- it 'schedules the destruction of logs' do
- expect(WebHooks::LogDestroyWorker).to receive(:perform_async).with({ 'hook_id' => hook.id })
- expect(Gitlab::AppLogger).to receive(:info).with(match(/scheduled a deletion of logs/))
+ it 'does not destroy logs' do
+ expect { subject.execute(hook) }.not_to change(WebHookLog, :count)
+ end
- subject.execute(hook)
- end
+ it 'schedules the destruction of logs' do
+ expect(WebHooks::LogDestroyWorker).to receive(:perform_async).with({ 'hook_id' => hook.id })
+ expect(Gitlab::AppLogger).to receive(:info).with(match(/scheduled a deletion of logs/))
- context 'when the hook fails to destroy' do
- before do
- allow(hook).to receive(:destroy).and_return(false)
- end
+ subject.execute(hook)
+ end
+
+ context 'when the hook fails to destroy' do
+ before do
+ allow(hook).to receive(:destroy).and_return(false)
+ end
- it 'is not a success' do
- expect(WebHooks::LogDestroyWorker).not_to receive(:perform_async)
+ it 'is not a success' do
+ expect(WebHooks::LogDestroyWorker).not_to receive(:perform_async)
- r = subject.execute(hook)
+ r = subject.execute(hook)
- expect(r).to be_error
- expect(r[:message]).to match %r{Unable to destroy}
- end
+ expect(r).to be_error
+ expect(r[:message]).to match %r{Unable to destroy}
end
end
end
diff --git a/spec/services/web_hooks/log_execution_service_spec.rb b/spec/services/web_hooks/log_execution_service_spec.rb
index 873f6adc8dc..1967a8368fb 100644
--- a/spec/services/web_hooks/log_execution_service_spec.rb
+++ b/spec/services/web_hooks/log_execution_service_spec.rb
@@ -101,27 +101,6 @@ RSpec.describe WebHooks::LogExecutionService do
it 'resets the failure count' do
expect { service.execute }.to change(project_hook, :recent_failures).to(0)
end
-
- it 'sends a message to AuthLogger if the hook as not previously enabled' do
- project_hook.update!(recent_failures: ::WebHook::FAILURE_THRESHOLD + 1)
-
- expect(Gitlab::AuthLogger).to receive(:info).with include(
- message: 'WebHook change active_state',
- # identification
- hook_id: project_hook.id,
- hook_type: project_hook.type,
- project_id: project_hook.project_id,
- group_id: nil,
- # relevant data
- prev_state: :permanently_disabled,
- new_state: :enabled,
- duration: 1.2,
- response_status: '200',
- recent_hook_failures: 0
- )
-
- service.execute
- end
end
end
@@ -158,27 +137,6 @@ RSpec.describe WebHooks::LogExecutionService do
expect { service.execute }.not_to change(project_hook, :recent_failures)
end
end
-
- it 'sends a message to AuthLogger if the state would change' do
- project_hook.update!(recent_failures: ::WebHook::FAILURE_THRESHOLD)
-
- expect(Gitlab::AuthLogger).to receive(:info).with include(
- message: 'WebHook change active_state',
- # identification
- hook_id: project_hook.id,
- hook_type: project_hook.type,
- project_id: project_hook.project_id,
- group_id: nil,
- # relevant data
- prev_state: :enabled,
- new_state: :permanently_disabled,
- duration: (be > 0),
- response_status: data[:response_status],
- recent_hook_failures: ::WebHook::FAILURE_THRESHOLD + 1
- )
-
- service.execute
- end
end
context 'when response_category is :error' do
@@ -200,25 +158,6 @@ RSpec.describe WebHooks::LogExecutionService do
expect { service.execute }.to change(project_hook, :backoff_count).by(1)
end
- it 'sends a message to AuthLogger if the state would change' do
- expect(Gitlab::AuthLogger).to receive(:info).with include(
- message: 'WebHook change active_state',
- # identification
- hook_id: project_hook.id,
- hook_type: project_hook.type,
- project_id: project_hook.project_id,
- group_id: nil,
- # relevant data
- prev_state: :enabled,
- new_state: :temporarily_disabled,
- duration: (be > 0),
- response_status: data[:response_status],
- recent_hook_failures: 0
- )
-
- service.execute
- end
-
context 'when the previous cool-off was near the maximum' do
before do
project_hook.update!(disabled_until: 5.minutes.ago, backoff_count: 8)
diff --git a/spec/services/webauthn/authenticate_service_spec.rb b/spec/services/webauthn/authenticate_service_spec.rb
index 61f64f24f5e..b40f9465b63 100644
--- a/spec/services/webauthn/authenticate_service_spec.rb
+++ b/spec/services/webauthn/authenticate_service_spec.rb
@@ -30,19 +30,28 @@ RSpec.describe Webauthn::AuthenticateService do
get_result['clientExtensionResults'] = {}
service = Webauthn::AuthenticateService.new(user, get_result.to_json, challenge)
- expect(service.execute).to be_truthy
+ expect(service.execute).to eq true
end
- it 'returns false if the response is valid but no matching stored credential is present' do
- other_client = WebAuthn::FakeClient.new(origin)
- other_client.create(challenge: challenge) # rubocop:disable Rails/SaveBang
+ context 'when response is valid but no matching stored credential is present' do
+ it 'returns false' do
+ other_client = WebAuthn::FakeClient.new(origin)
+ other_client.create(challenge: challenge) # rubocop:disable Rails/SaveBang
- get_result = other_client.get(challenge: challenge)
+ get_result = other_client.get(challenge: challenge)
- get_result['clientExtensionResults'] = {}
- service = Webauthn::AuthenticateService.new(user, get_result.to_json, challenge)
+ get_result['clientExtensionResults'] = {}
+ service = Webauthn::AuthenticateService.new(user, get_result.to_json, challenge)
+
+ expect(service.execute).to eq false
+ end
+ end
- expect(service.execute).to be_falsey
+ context 'when device response includes invalid json' do
+ it 'returns false' do
+ service = Webauthn::AuthenticateService.new(user, 'invalid JSON', '')
+ expect(service.execute).to eq false
+ end
end
end
end
diff --git a/spec/services/work_items/create_and_link_service_spec.rb b/spec/services/work_items/create_and_link_service_spec.rb
index 81be15f9e2f..e259a22d388 100644
--- a/spec/services/work_items/create_and_link_service_spec.rb
+++ b/spec/services/work_items/create_and_link_service_spec.rb
@@ -6,7 +6,7 @@ RSpec.describe WorkItems::CreateAndLinkService do
let_it_be(:group) { create(:group) }
let_it_be(:project) { create(:project, group: group) }
let_it_be(:user) { create(:user) }
- let_it_be(:related_work_item) { create(:work_item, project: project) }
+ let_it_be(:related_work_item, refind: true) { create(:work_item, project: project) }
let_it_be(:invalid_parent) { create(:work_item, :task, project: project) }
let(:spam_params) { double }
@@ -24,6 +24,26 @@ RSpec.describe WorkItems::CreateAndLinkService do
project.add_developer(user)
end
+ shared_examples 'successful work item and link creator' do
+ it 'creates a work item successfully with links' do
+ expect do
+ service_result
+ end.to change(WorkItem, :count).by(1).and(
+ change(WorkItems::ParentLink, :count).by(1)
+ )
+ end
+
+ it 'copies confidential status from the parent' do
+ expect do
+ service_result
+ end.to change(WorkItem, :count).by(1)
+
+ created_task = WorkItem.last
+
+ expect(created_task.confidential).to eq(related_work_item.confidential)
+ end
+ end
+
describe '#execute' do
subject(:service_result) { described_class.new(project: project, current_user: user, params: params, spam_params: spam_params, link_params: link_params).execute }
@@ -42,15 +62,21 @@ RSpec.describe WorkItems::CreateAndLinkService do
)
end
+ it_behaves_like 'title with extra spaces'
+
context 'when link params are valid' do
let(:link_params) { { parent_work_item: related_work_item } }
- it 'creates a work item successfully with links' do
- expect do
- service_result
- end.to change(WorkItem, :count).by(1).and(
- change(WorkItems::ParentLink, :count).by(1)
- )
+ context 'when parent is not confidential' do
+ it_behaves_like 'successful work item and link creator'
+ end
+
+ context 'when parent is confidential' do
+ before do
+ related_work_item.update!(confidential: true)
+ end
+
+ it_behaves_like 'successful work item and link creator'
end
end
diff --git a/spec/services/work_items/create_from_task_service_spec.rb b/spec/services/work_items/create_from_task_service_spec.rb
index 7d2dab228b1..7c5430f038c 100644
--- a/spec/services/work_items/create_from_task_service_spec.rb
+++ b/spec/services/work_items/create_from_task_service_spec.rb
@@ -64,6 +64,8 @@ RSpec.describe WorkItems::CreateFromTaskService do
expect(list_work_item.description).to eq("- [ ] #{created_work_item.to_reference}+")
end
+
+ it_behaves_like 'title with extra spaces'
end
context 'when last operation fails' do
diff --git a/spec/services/work_items/create_service_spec.rb b/spec/services/work_items/create_service_spec.rb
index 4009c85bacd..c0bcf9b606d 100644
--- a/spec/services/work_items/create_service_spec.rb
+++ b/spec/services/work_items/create_service_spec.rb
@@ -65,6 +65,12 @@ RSpec.describe WorkItems::CreateService do
expect(work_item.description).to eq('please fix')
expect(work_item.work_item_type.base_type).to eq('issue')
end
+
+ it 'calls NewIssueWorker with correct arguments' do
+ expect(NewIssueWorker).to receive(:perform_async).with(Integer, current_user.id, 'WorkItem')
+
+ service_result
+ end
end
context 'when params are invalid' do
@@ -170,7 +176,7 @@ RSpec.describe WorkItems::CreateService do
let_it_be(:parent) { create(:work_item, :task, project: project) }
it_behaves_like 'fails creating work item and returns errors' do
- let(:error_message) { 'only Issue and Incident can be parent of Task.'}
+ let(:error_message) { 'only Issue and Incident can be parent of Task.' }
end
end
@@ -197,7 +203,7 @@ RSpec.describe WorkItems::CreateService do
end
it_behaves_like 'fails creating work item and returns errors' do
- let(:error_message) { 'No matching task found. Make sure that you are adding a valid task ID.'}
+ let(:error_message) { 'No matching task found. Make sure that you are adding a valid task ID.' }
end
end
end
diff --git a/spec/services/work_items/parent_links/create_service_spec.rb b/spec/services/work_items/parent_links/create_service_spec.rb
index 85b0ee040cd..0ba41373544 100644
--- a/spec/services/work_items/parent_links/create_service_spec.rb
+++ b/spec/services/work_items/parent_links/create_service_spec.rb
@@ -12,10 +12,10 @@ RSpec.describe WorkItems::ParentLinks::CreateService do
let_it_be(:task1) { create(:work_item, :task, project: project) }
let_it_be(:task2) { create(:work_item, :task, project: project) }
let_it_be(:guest_task) { create(:work_item, :task) }
- let_it_be(:invalid_task) { build_stubbed(:work_item, :task, id: non_existing_record_id)}
+ let_it_be(:invalid_task) { build_stubbed(:work_item, :task, id: non_existing_record_id) }
let_it_be(:another_project) { (create :project) }
let_it_be(:other_project_task) { create(:work_item, :task, iid: 100, project: another_project) }
- let_it_be(:existing_parent_link) { create(:parent_link, work_item: task, work_item_parent: work_item)}
+ let_it_be(:existing_parent_link) { create(:parent_link, work_item: task, work_item_parent: work_item) }
let(:parent_link_class) { WorkItems::ParentLink }
let(:issuable_type) { :task }
@@ -84,13 +84,26 @@ RSpec.describe WorkItems::ParentLinks::CreateService do
expect(subject[:created_references].map(&:work_item_id)).to match_array([task1.id, task2.id])
end
+ it 'creates notes', :aggregate_failures do
+ subject
+
+ work_item_notes = work_item.notes.last(2)
+ expect(work_item_notes.first.note).to eq("added #{task1.to_reference} as child task")
+ expect(work_item_notes.last.note).to eq("added #{task2.to_reference} as child task")
+ expect(task1.notes.last.note).to eq("added #{work_item.to_reference} as parent issue")
+ expect(task2.notes.last.note).to eq("added #{work_item.to_reference} as parent issue")
+ end
+
context 'when task is already assigned' do
let(:params) { { issuable_references: [task, task2] } }
- it 'creates links only for non related tasks' do
+ it 'creates links only for non related tasks', :aggregate_failures do
expect { subject }.to change(parent_link_class, :count).by(1)
expect(subject[:created_references].map(&:work_item_id)).to match_array([task2.id])
+ expect(work_item.notes.last.note).to eq("added #{task2.to_reference} as child task")
+ expect(task2.notes.last.note).to eq("added #{work_item.to_reference} as parent issue")
+ expect(task.notes).to be_empty
end
end
@@ -109,6 +122,15 @@ RSpec.describe WorkItems::ParentLinks::CreateService do
is_expected.to eq(service_error(error, http_status: 422))
end
+
+ it 'creates notes for valid links' do
+ subject
+
+ expect(work_item.notes.last.note).to eq("added #{task1.to_reference} as child task")
+ expect(task1.notes.last.note).to eq("added #{work_item.to_reference} as parent issue")
+ expect(issue.notes).to be_empty
+ expect(other_project_task.notes).to be_empty
+ end
end
context 'when parent type is invalid' do
diff --git a/spec/services/work_items/parent_links/destroy_service_spec.rb b/spec/services/work_items/parent_links/destroy_service_spec.rb
index 574b70af397..654a03ef6f7 100644
--- a/spec/services/work_items/parent_links/destroy_service_spec.rb
+++ b/spec/services/work_items/parent_links/destroy_service_spec.rb
@@ -9,7 +9,7 @@ RSpec.describe WorkItems::ParentLinks::DestroyService do
let_it_be(:project) { create(:project) }
let_it_be(:work_item) { create(:work_item, project: project) }
let_it_be(:task) { create(:work_item, :task, project: project) }
- let_it_be(:parent_link) { create(:parent_link, work_item: task, work_item_parent: work_item)}
+ let_it_be(:parent_link) { create(:parent_link, work_item: task, work_item_parent: work_item) }
let(:parent_link_class) { WorkItems::ParentLink }
@@ -23,8 +23,11 @@ RSpec.describe WorkItems::ParentLinks::DestroyService do
context 'when user has permissions to update work items' do
let(:user) { reporter }
- it 'removes relation' do
+ it 'removes relation and creates notes', :aggregate_failures do
expect { subject }.to change(parent_link_class, :count).by(-1)
+
+ expect(work_item.notes.last.note).to eq("removed child task #{task.to_reference}")
+ expect(task.notes.last.note).to eq("removed parent issue #{work_item.to_reference}")
end
it 'returns success message' do
@@ -35,8 +38,10 @@ RSpec.describe WorkItems::ParentLinks::DestroyService do
context 'when user has insufficient permissions' do
let(:user) { guest }
- it 'does not remove relation' do
+ it 'does not remove relation', :aggregate_failures do
expect { subject }.not_to change(parent_link_class, :count).from(1)
+
+ expect(SystemNoteService).not_to receive(:unrelate_work_item)
end
it 'returns error message' do
diff --git a/spec/services/work_items/update_service_spec.rb b/spec/services/work_items/update_service_spec.rb
index b17c9ffb4fb..2e0b0051495 100644
--- a/spec/services/work_items/update_service_spec.rb
+++ b/spec/services/work_items/update_service_spec.rb
@@ -4,7 +4,8 @@ require 'spec_helper'
RSpec.describe WorkItems::UpdateService do
let_it_be(:developer) { create(:user) }
- let_it_be(:project) { create(:project).tap { |proj| proj.add_developer(developer) } }
+ let_it_be(:guest) { create(:user) }
+ let_it_be(:project) { create(:project) }
let_it_be(:parent) { create(:work_item, project: project) }
let_it_be_with_reload(:work_item) { create(:work_item, project: project, assignees: [developer]) }
@@ -13,21 +14,36 @@ RSpec.describe WorkItems::UpdateService do
let(:opts) { {} }
let(:current_user) { developer }
+ before do
+ project.add_developer(developer)
+ project.add_guest(guest)
+ end
+
describe '#execute' do
- subject(:update_work_item) do
+ let(:service) do
described_class.new(
project: project,
current_user: current_user,
params: opts,
spam_params: spam_params,
widget_params: widget_params
- ).execute(work_item)
+ )
end
+ subject(:update_work_item) { service.execute(work_item) }
+
before do
stub_spam_services
end
+ shared_examples 'update service that triggers graphql dates updated subscription' do
+ it 'triggers graphql subscription issueableDatesUpdated' do
+ expect(GraphqlTriggers).to receive(:issuable_dates_updated).with(work_item).and_call_original
+
+ update_work_item
+ end
+ end
+
context 'when title is changed' do
let(:opts) { { title: 'changed' } }
@@ -50,6 +66,16 @@ RSpec.describe WorkItems::UpdateService do
end
end
+ context 'when dates are changed' do
+ let(:opts) { { start_date: Date.today } }
+
+ it 'tracks users updating work item dates' do
+ expect(Gitlab::UsageDataCounters::WorkItemActivityUniqueCounter).to receive(:track_work_item_date_changed_action).with(author: current_user)
+
+ update_work_item
+ end
+ end
+
context 'when updating state_event' do
context 'when state_event is close' do
let(:opts) { { state_event: 'close' } }
@@ -82,8 +108,7 @@ RSpec.describe WorkItems::UpdateService do
let(:widget_params) do
{
hierarchy_widget: { parent: parent },
- description_widget: { description: 'foo' },
- weight_widget: { weight: 1 }
+ description_widget: { description: 'foo' }
}
end
@@ -101,8 +126,7 @@ RSpec.describe WorkItems::UpdateService do
let(:supported_widgets) do
[
- { klass: WorkItems::Widgets::DescriptionService::UpdateService, callback: :update, params: { description: 'foo' } },
- { klass: WorkItems::Widgets::WeightService::UpdateService, callback: :update, params: { weight: 1 } },
+ { klass: WorkItems::Widgets::DescriptionService::UpdateService, callback: :before_update_callback, params: { description: 'foo' } },
{ klass: WorkItems::Widgets::HierarchyService::UpdateService, callback: :before_update_in_transaction, params: { parent: parent } }
]
end
@@ -126,7 +150,7 @@ RSpec.describe WorkItems::UpdateService do
before do
allow_next_instance_of(widget_service_class) do |instance|
allow(instance)
- .to receive(:update)
+ .to receive(:before_update_callback)
.with(params: { description: 'changed' }).and_return(nil)
end
end
@@ -142,6 +166,69 @@ RSpec.describe WorkItems::UpdateService do
expect(work_item.description).to eq('changed')
end
+
+ context 'with mentions', :mailer, :sidekiq_might_not_need_inline do
+ shared_examples 'creates the todo and sends email' do |attribute|
+ it 'creates a todo and sends email' do
+ expect { perform_enqueued_jobs { update_work_item } }.to change(Todo, :count).by(1)
+ expect(work_item.reload.attributes[attribute.to_s]).to eq("mention #{guest.to_reference}")
+ should_email(guest)
+ end
+ end
+
+ context 'when description contains a user mention' do
+ let(:widget_params) { { description_widget: { description: "mention #{guest.to_reference}" } } }
+
+ it_behaves_like 'creates the todo and sends email', :description
+ end
+
+ context 'when title contains a user mention' do
+ let(:opts) { { title: "mention #{guest.to_reference}" } }
+
+ it_behaves_like 'creates the todo and sends email', :title
+ end
+ end
+
+ context 'when work item validation fails' do
+ let(:opts) { { title: '' } }
+
+ it 'returns validation errors' do
+ expect(update_work_item[:message]).to contain_exactly("Title can't be blank")
+ end
+
+ it 'does not execute after-update widgets', :aggregate_failures do
+ expect(service).to receive(:update).and_call_original
+ expect(service).not_to receive(:execute_widgets).with(callback: :update, widget_params: widget_params)
+
+ expect { update_work_item }.not_to change(work_item, :description)
+ end
+ end
+ end
+
+ context 'for start and due date widget' do
+ let(:updated_date) { 1.week.from_now.to_date }
+
+ context 'when due_date is updated' do
+ let(:widget_params) { { start_and_due_date_widget: { due_date: updated_date } } }
+
+ it_behaves_like 'update service that triggers graphql dates updated subscription'
+ end
+
+ context 'when start_date is updated' do
+ let(:widget_params) { { start_and_due_date_widget: { start_date: updated_date } } }
+
+ it_behaves_like 'update service that triggers graphql dates updated subscription'
+ end
+
+ context 'when no date param is updated' do
+ let(:opts) { { title: 'should not trigger' } }
+
+ it 'does not trigger date updated subscription' do
+ expect(GraphqlTriggers).not_to receive(:issuable_dates_updated)
+
+ update_work_item
+ end
+ end
end
context 'for the hierarchy widget' do
@@ -175,6 +262,22 @@ RSpec.describe WorkItems::UpdateService do
end.to not_change(WorkItems::ParentLink, :count).and(not_change(work_item, :title))
end
end
+
+ context 'when work item validation fails' do
+ let(:opts) { { title: '' } }
+
+ it 'returns validation errors' do
+ expect(update_work_item[:message]).to contain_exactly("Title can't be blank")
+ end
+
+ it 'does not execute after-update widgets', :aggregate_failures do
+ expect(service).to receive(:update).and_call_original
+ expect(service).not_to receive(:execute_widgets).with(callback: :before_update_in_transaction, widget_params: widget_params)
+ expect(work_item.work_item_children).not_to include(child_work_item)
+
+ update_work_item
+ end
+ end
end
end
end
diff --git a/spec/services/work_items/widgets/assignees_service/update_service_spec.rb b/spec/services/work_items/widgets/assignees_service/update_service_spec.rb
new file mode 100644
index 00000000000..0ab2c85f078
--- /dev/null
+++ b/spec/services/work_items/widgets/assignees_service/update_service_spec.rb
@@ -0,0 +1,116 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe WorkItems::Widgets::AssigneesService::UpdateService, :freeze_time do
+ let_it_be(:reporter) { create(:user) }
+ let_it_be(:project) { create(:project, :private) }
+ let_it_be(:new_assignee) { create(:user) }
+
+ let(:work_item) do
+ create(:work_item, project: project, updated_at: 1.day.ago)
+ end
+
+ let(:widget) { work_item.widgets.find { |widget| widget.is_a?(WorkItems::Widgets::Assignees) } }
+ let(:current_user) { reporter }
+ let(:params) { { assignee_ids: [new_assignee.id] } }
+
+ before_all do
+ project.add_reporter(reporter)
+ project.add_guest(new_assignee)
+ end
+
+ describe '#before_update_in_transaction' do
+ subject do
+ described_class.new(widget: widget, current_user: current_user)
+ .before_update_in_transaction(params: params)
+ end
+
+ it 'updates the assignees and sets updated_at to the current time' do
+ subject
+
+ expect(work_item.assignee_ids).to contain_exactly(new_assignee.id)
+ expect(work_item.updated_at).to be_like_time(Time.current)
+ end
+
+ context 'when passing an empty array' do
+ let(:params) { { assignee_ids: [] } }
+
+ before do
+ work_item.assignee_ids = [reporter.id]
+ end
+
+ it 'removes existing assignees' do
+ subject
+
+ expect(work_item.assignee_ids).to be_empty
+ expect(work_item.updated_at).to be_like_time(Time.current)
+ end
+ end
+
+ context 'when user does not have access' do
+ let(:current_user) { create(:user) }
+
+ it 'does not update the assignees' do
+ subject
+
+ expect(work_item.assignee_ids).to be_empty
+ expect(work_item.updated_at).to be_like_time(1.day.ago)
+ end
+ end
+
+ context 'when multiple assignees are given' do
+ let(:params) { { assignee_ids: [new_assignee.id, reporter.id] } }
+
+ context 'when work item allows multiple assignees' do
+ before do
+ allow(work_item).to receive(:allows_multiple_assignees?).and_return(true)
+ end
+
+ it 'sets all the given assignees' do
+ subject
+
+ expect(work_item.assignee_ids).to contain_exactly(new_assignee.id, reporter.id)
+ expect(work_item.updated_at).to be_like_time(Time.current)
+ end
+ end
+
+ context 'when work item does not allow multiple assignees' do
+ before do
+ allow(work_item).to receive(:allows_multiple_assignees?).and_return(false)
+ end
+
+ it 'only sets the first assignee' do
+ subject
+
+ expect(work_item.assignee_ids).to contain_exactly(new_assignee.id)
+ expect(work_item.updated_at).to be_like_time(Time.current)
+ end
+ end
+ end
+
+ context 'when assignee does not have access to the work item' do
+ let(:params) { { assignee_ids: [create(:user).id] } }
+
+ it 'does not set the assignee' do
+ subject
+
+ expect(work_item.assignee_ids).to be_empty
+ expect(work_item.updated_at).to be_like_time(1.day.ago)
+ end
+ end
+
+ context 'when assignee ids are the same as the existing ones' do
+ before do
+ work_item.assignee_ids = [new_assignee.id]
+ end
+
+ it 'does not touch updated_at' do
+ subject
+
+ expect(work_item.assignee_ids).to contain_exactly(new_assignee.id)
+ expect(work_item.updated_at).to be_like_time(1.day.ago)
+ end
+ end
+ end
+end
diff --git a/spec/services/work_items/widgets/description_service/update_service_spec.rb b/spec/services/work_items/widgets/description_service/update_service_spec.rb
index a2eceb97f09..582d9dc85f7 100644
--- a/spec/services/work_items/widgets/description_service/update_service_spec.rb
+++ b/spec/services/work_items/widgets/description_service/update_service_spec.rb
@@ -3,32 +3,102 @@
require 'spec_helper'
RSpec.describe WorkItems::Widgets::DescriptionService::UpdateService do
- let_it_be(:user) { create(:user) }
- let_it_be(:project) { create(:project) }
- let_it_be_with_reload(:work_item) { create(:work_item, project: project, description: 'old description') }
+ let_it_be(:random_user) { create(:user) }
+ let_it_be(:author) { create(:user) }
+ let_it_be(:guest) { create(:user) }
+ let_it_be(:reporter) { create(:user) }
+ let_it_be(:project) { create(:project, :public) }
- let(:widget) { work_item.widgets.find {|widget| widget.is_a?(WorkItems::Widgets::Description) } }
+ let(:params) { { description: 'updated description' } }
+ let(:current_user) { author }
+ let(:work_item) do
+ create(:work_item, author: author, project: project, description: 'old description',
+ last_edited_at: Date.yesterday, last_edited_by: random_user
+ )
+ end
- describe '#update' do
- subject { described_class.new(widget: widget, current_user: user).update(params: params) } # rubocop:disable Rails/SaveBang
+ let(:widget) { work_item.widgets.find { |widget| widget.is_a?(WorkItems::Widgets::Description) } }
- context 'when description param is present' do
- let(:params) { { description: 'updated description' } }
+ describe '#update' do
+ subject { described_class.new(widget: widget, current_user: current_user).before_update_callback(params: params) }
+ shared_examples 'sets work item description' do
it 'correctly sets work item description value' do
subject
- expect(work_item.description).to eq('updated description')
+ expect(work_item.description).to eq(params[:description])
+ expect(work_item.last_edited_by).to eq(current_user)
+ expect(work_item.last_edited_at).to be_within(2.seconds).of(Time.current)
end
end
- context 'when description param is not present' do
- let(:params) { {} }
-
+ shared_examples 'does not set work item description' do
it 'does not change work item description value' do
subject
expect(work_item.description).to eq('old description')
+ expect(work_item.last_edited_by).to eq(random_user)
+ expect(work_item.last_edited_at).to eq(Date.yesterday)
+ end
+ end
+
+ context 'when user has permission to update description' do
+ context 'when user is work item author' do
+ let(:current_user) { author }
+
+ it_behaves_like 'sets work item description'
+ end
+
+ context 'when user is a project reporter' do
+ let(:current_user) { reporter }
+
+ before do
+ project.add_reporter(reporter)
+ end
+
+ it_behaves_like 'sets work item description'
+ end
+
+ context 'when description is nil' do
+ let(:current_user) { author }
+ let(:params) { { description: nil } }
+
+ it_behaves_like 'sets work item description'
+ end
+
+ context 'when description is empty' do
+ let(:current_user) { author }
+ let(:params) { { description: '' } }
+
+ it_behaves_like 'sets work item description'
+ end
+
+ context 'when description param is not present' do
+ let(:params) { {} }
+
+ it_behaves_like 'does not set work item description'
+ end
+ end
+
+ context 'when user does not have permission to update description' do
+ context 'when user is a project guest' do
+ let(:current_user) { guest }
+
+ before do
+ project.add_guest(guest)
+ end
+
+ it_behaves_like 'does not set work item description'
+ end
+
+ context 'with private project' do
+ let_it_be(:project) { create(:project) }
+
+ context 'when user is work item author' do
+ let(:current_user) { author }
+
+ it_behaves_like 'does not set work item description'
+ end
end
end
end
diff --git a/spec/services/work_items/widgets/hierarchy_service/update_service_spec.rb b/spec/services/work_items/widgets/hierarchy_service/update_service_spec.rb
index 4f6ff1b8676..9a425d5308c 100644
--- a/spec/services/work_items/widgets/hierarchy_service/update_service_spec.rb
+++ b/spec/services/work_items/widgets/hierarchy_service/update_service_spec.rb
@@ -11,7 +11,7 @@ RSpec.describe WorkItems::Widgets::HierarchyService::UpdateService do
let_it_be(:child_work_item) { create(:work_item, :task, project: project) }
let_it_be(:existing_link) { create(:parent_link, work_item: child_work_item, work_item_parent: work_item) }
- let(:widget) { work_item.widgets.find {|widget| widget.is_a?(WorkItems::Widgets::Hierarchy) } }
+ let(:widget) { work_item.widgets.find { |widget| widget.is_a?(WorkItems::Widgets::Hierarchy) } }
let(:not_found_error) { 'No matching task found. Make sure that you are adding a valid task ID.' }
shared_examples 'raises a WidgetError' do
@@ -29,13 +29,21 @@ RSpec.describe WorkItems::Widgets::HierarchyService::UpdateService do
end
end
+ context 'when invalid params are present' do
+ let(:params) { { other_parent: parent_work_item } }
+
+ it_behaves_like 'raises a WidgetError' do
+ let(:message) { 'One or more arguments are invalid: other_parent.' }
+ end
+ end
+
context 'when updating children' do
let_it_be(:child_work_item2) { create(:work_item, :task, project: project) }
let_it_be(:child_work_item3) { create(:work_item, :task, project: project) }
let_it_be(:child_work_item4) { create(:work_item, :task, project: project) }
context 'when work_items_hierarchy feature flag is disabled' do
- let(:params) { { children: [child_work_item4] }}
+ let(:params) { { children: [child_work_item4] } }
before do
stub_feature_flags(work_items_hierarchy: false)
@@ -47,7 +55,7 @@ RSpec.describe WorkItems::Widgets::HierarchyService::UpdateService do
end
context 'when user has insufficient permissions to link work items' do
- let(:params) { { children: [child_work_item4] }}
+ let(:params) { { children: [child_work_item4] } }
it_behaves_like 'raises a WidgetError' do
let(:message) { not_found_error }
@@ -60,7 +68,7 @@ RSpec.describe WorkItems::Widgets::HierarchyService::UpdateService do
end
context 'with valid params' do
- let(:params) { { children: [child_work_item2, child_work_item3] }}
+ let(:params) { { children: [child_work_item2, child_work_item3] } }
it 'correctly sets work item parent' do
subject
@@ -71,7 +79,7 @@ RSpec.describe WorkItems::Widgets::HierarchyService::UpdateService do
end
context 'when child is already assigned' do
- let(:params) { { children: [child_work_item] }}
+ let(:params) { { children: [child_work_item] } }
it_behaves_like 'raises a WidgetError' do
let(:message) { 'Task(s) already assigned' }
@@ -81,7 +89,7 @@ RSpec.describe WorkItems::Widgets::HierarchyService::UpdateService do
context 'when child type is invalid' do
let_it_be(:child_issue) { create(:work_item, project: project) }
- let(:params) { { children: [child_issue] }}
+ let(:params) { { children: [child_issue] } }
it_behaves_like 'raises a WidgetError' do
let(:message) do
@@ -95,7 +103,7 @@ RSpec.describe WorkItems::Widgets::HierarchyService::UpdateService do
context 'when updating parent' do
let_it_be(:work_item) { create(:work_item, :task, project: project) }
- let(:params) {{ parent: parent_work_item } }
+ let(:params) { { parent: parent_work_item } }
context 'when work_items_hierarchy feature flag is disabled' do
before do
@@ -144,9 +152,9 @@ RSpec.describe WorkItems::Widgets::HierarchyService::UpdateService do
end
context 'when type is invalid' do
- let_it_be(:parent_task) { create(:work_item, :task, project: project)}
+ let_it_be(:parent_task) { create(:work_item, :task, project: project) }
- let(:params) {{ parent: parent_task } }
+ let(:params) { { parent: parent_task } }
it_behaves_like 'raises a WidgetError' do
let(:message) do
diff --git a/spec/services/work_items/widgets/start_and_due_date_service/update_service_spec.rb b/spec/services/work_items/widgets/start_and_due_date_service/update_service_spec.rb
new file mode 100644
index 00000000000..d328c541fc7
--- /dev/null
+++ b/spec/services/work_items/widgets/start_and_due_date_service/update_service_spec.rb
@@ -0,0 +1,62 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe WorkItems::Widgets::StartAndDueDateService::UpdateService do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project) }
+ let_it_be_with_reload(:work_item) { create(:work_item, project: project) }
+
+ let(:widget) { work_item.widgets.find { |widget| widget.is_a?(WorkItems::Widgets::StartAndDueDate) } }
+
+ describe '#before_update_callback' do
+ let(:start_date) { Date.today }
+ let(:due_date) { 1.week.from_now.to_date }
+
+ subject(:update_params) do
+ described_class.new(widget: widget, current_user: user).before_update_callback(params: params)
+ end
+
+ context 'when start and due date params are present' do
+ let(:params) { { start_date: Date.today, due_date: 1.week.from_now.to_date } }
+
+ it 'correctly sets date values' do
+ expect do
+ update_params
+ end.to change(work_item, :start_date).from(nil).to(start_date).and(
+ change(work_item, :due_date).from(nil).to(due_date)
+ )
+ end
+ end
+
+ context 'when date params are not present' do
+ let(:params) { {} }
+
+ it 'does not change work item date values' do
+ expect do
+ update_params
+ end.to not_change(work_item, :start_date).from(nil).and(
+ not_change(work_item, :due_date).from(nil)
+ )
+ end
+ end
+
+ context 'when work item had both date values already set' do
+ before do
+ work_item.update!(start_date: start_date, due_date: due_date)
+ end
+
+ context 'when one of the two params is null' do
+ let(:params) { { start_date: nil } }
+
+ it 'sets only one date to null' do
+ expect do
+ update_params
+ end.to change(work_item, :start_date).from(start_date).to(nil).and(
+ not_change(work_item, :due_date).from(due_date)
+ )
+ end
+ end
+ end
+ end
+end
diff --git a/spec/services/work_items/widgets/weight_service/update_service_spec.rb b/spec/services/work_items/widgets/weight_service/update_service_spec.rb
deleted file mode 100644
index 97e17f1c526..00000000000
--- a/spec/services/work_items/widgets/weight_service/update_service_spec.rb
+++ /dev/null
@@ -1,36 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe WorkItems::Widgets::WeightService::UpdateService do
- let_it_be(:user) { create(:user) }
- let_it_be(:project) { create(:project) }
- let_it_be_with_reload(:work_item) { create(:work_item, project: project, weight: 1) }
-
- let(:widget) { work_item.widgets.find {|widget| widget.is_a?(WorkItems::Widgets::Weight) } }
-
- describe '#update' do
- subject { described_class.new(widget: widget, current_user: user).update(params: params) } # rubocop:disable Rails/SaveBang
-
- context 'when weight param is present' do
- let(:params) { { weight: 2 } }
-
- it 'correctly sets work item weight value' do
- subject
-
- expect(work_item.weight).to eq(2)
- end
- end
-
- context 'when weight param is not present' do
- let(:params) { {} }
-
- it 'does not change work item weight value', :aggregate_failures do
- expect { subject }
- .to not_change { work_item.weight }
-
- expect(work_item.weight).to eq(1)
- end
- end
- end
-end
diff --git a/spec/spec_helper.rb b/spec/spec_helper.rb
index 47cd78873f8..8acf3bcf9c0 100644
--- a/spec/spec_helper.rb
+++ b/spec/spec_helper.rb
@@ -53,10 +53,8 @@ end
require 'rainbow/ext/string'
Rainbow.enabled = false
-# Require JH first because we need override some EE methods with JH methods,
-# if we load EE first, we can't find JH modules in prepend_mod method
-require_relative('../jh/spec/spec_helper') if Gitlab.jh?
require_relative('../ee/spec/spec_helper') if Gitlab.ee?
+require_relative('../jh/spec/spec_helper') if Gitlab.jh?
# Requires supporting ruby files with custom matchers and macros, etc,
# in spec/support/ and its subdirectories.
@@ -161,7 +159,6 @@ RSpec.configure do |config|
config.include LicenseHelpers
config.include ActiveJob::TestHelper
config.include ActiveSupport::Testing::TimeHelpers
- config.include CycleAnalyticsHelpers
config.include FactoryBot::Syntax::Methods
config.include FixtureHelpers
config.include NonExistingRecordsHelpers
@@ -208,6 +205,7 @@ RSpec.configure do |config|
include StubFeatureFlags
include StubSnowplow
+ include StubMember
if ENV['CI'] || ENV['RETRIES']
# This includes the first try, i.e. tests will be run 4 times before failing.
@@ -334,6 +332,9 @@ RSpec.configure do |config|
# See https://docs.gitlab.com/ee/development/feature_flags/#selectively-disable-by-actor
stub_feature_flags(legacy_merge_request_state_check_for_merged_result_pipelines: false)
+ # Will be removed in https://gitlab.com/gitlab-org/gitlab/-/issues/369875
+ stub_feature_flags(override_group_level_protected_environment_settings_permission: false)
+
allow(Gitlab::GitalyClient).to receive(:can_use_disk?).and_return(enable_rugged)
else
unstub_all_feature_flags
@@ -391,6 +392,11 @@ RSpec.configure do |config|
Gitlab::WithRequestStore.with_request_store { example.run }
end
+ config.around(:example, :enable_rugged) do |example|
+ # Skip tests that need rugged when using praefect DB.
+ example.run unless GitalySetup.praefect_with_db?
+ end
+
# previous test runs may have left some resources throttled
config.before do
::Gitlab::ExclusiveLease.reset_all!("el:throttle:*")
@@ -505,3 +511,16 @@ module TouchRackUploadedFile
end
Rack::Test::UploadedFile.prepend(TouchRackUploadedFile)
+
+# Monkey-patch to enable ActiveSupport::Notifications for Redis commands
+module RedisCommands
+ module Instrumentation
+ def process(commands, &block)
+ ActiveSupport::Notifications.instrument('redis.process_commands', commands: commands) do
+ super(commands, &block)
+ end
+ end
+ end
+end
+
+Redis::Client.prepend(RedisCommands::Instrumentation)
diff --git a/spec/support/database/cross-join-allowlist.yml b/spec/support/database/cross-join-allowlist.yml
index 19b1ce30d5f..fe51488c706 100644
--- a/spec/support/database/cross-join-allowlist.yml
+++ b/spec/support/database/cross-join-allowlist.yml
@@ -1,6 +1 @@
-- "./spec/lib/gitlab/background_migration/copy_ci_builds_columns_to_security_scans_spec.rb"
-- "./spec/lib/gitlab/background_migration/migrate_pages_metadata_spec.rb"
-- "./spec/migrations/20210907211557_finalize_ci_builds_bigint_conversion_spec.rb"
-- "./spec/migrations/associate_existing_dast_builds_with_variables_spec.rb"
-- "./spec/migrations/disable_job_token_scope_when_unused_spec.rb"
-- "./spec/migrations/schedule_copy_ci_builds_columns_to_security_scans2_spec.rb"
+[]
diff --git a/spec/support/database/gitlab_schemas_validate_connection.rb b/spec/support/database/gitlab_schemas_validate_connection.rb
new file mode 100644
index 00000000000..118c6ea5001
--- /dev/null
+++ b/spec/support/database/gitlab_schemas_validate_connection.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
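+# Runs examples tagged :suppress_gitlab_schemas_validate_connection (or with
+# `query_analyzers: false`) with the GitlabSchemasValidateConnection query
+# analyzer suppressed.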
+RSpec.configure do |config|
+ def with_gitlab_schemas_validate_connection_prevented
+ Gitlab::Database::QueryAnalyzers::GitlabSchemasValidateConnection.with_suppressed do
+ yield
+ end
+ end
+
+ config.around(:each, :suppress_gitlab_schemas_validate_connection) do |example|
+ with_gitlab_schemas_validate_connection_prevented(&example)
+ end
+
+ config.around(:each, query_analyzers: false) do |example|
+ with_gitlab_schemas_validate_connection_prevented(&example)
+ end
+end
diff --git a/spec/support/database/multiple_databases.rb b/spec/support/database/multiple_databases.rb
index 94857b47127..05f26e57e9c 100644
--- a/spec/support/database/multiple_databases.rb
+++ b/spec/support/database/multiple_databases.rb
@@ -98,6 +98,26 @@ RSpec.configure do |config|
example.run
end
end
+
+ config.around(:each, :migration) do |example|
+ migration_schema = example.metadata[:migration]
+ migration_schema = :gitlab_main if migration_schema == true
+ base_model = Gitlab::Database.schemas_to_base_models.fetch(migration_schema).first
+
+    # Migrations require `ActiveRecord::Base` to point to the desired database
+ if base_model != ActiveRecord::Base
+ with_reestablished_active_record_base do
+ reconfigure_db_connection(
+ model: ActiveRecord::Base,
+ config_model: base_model
+ )
+
+ example.run
+ end
+ else
+ example.run
+ end
+ end
end
ActiveRecord::Base.singleton_class.prepend(::Database::ActiveRecordBaseEstablishConnection) # rubocop:disable Database/MultipleDatabases
diff --git a/spec/support/finder_collection_allowlist.yml b/spec/support/finder_collection_allowlist.yml
index 8f09153afec..1ac8e49fb45 100644
--- a/spec/support/finder_collection_allowlist.yml
+++ b/spec/support/finder_collection_allowlist.yml
@@ -57,6 +57,8 @@
- Security::ScanExecutionPoliciesFinder
- Security::TrainingProviders::BaseUrlFinder
- Security::TrainingUrlsFinder
+- Security::TrainingProviders::KontraUrlFinder
+- Security::TrainingProviders::SecureCodeWarriorUrlFinder
- SentryIssueFinder
- ServerlessDomainFinder
- TagsFinder
diff --git a/spec/support/helpers/api_helpers.rb b/spec/support/helpers/api_helpers.rb
index fd85071cca3..62bb9576695 100644
--- a/spec/support/helpers/api_helpers.rb
+++ b/spec/support/helpers/api_helpers.rb
@@ -19,15 +19,17 @@ module ApiHelpers
# => "/api/v2/issues?foo=bar&private_token=..."
#
# Returns the relative path to the requested API resource
- def api(path, user = nil, version: API::API.version, personal_access_token: nil, oauth_access_token: nil, job_token: nil)
+ def api(path, user = nil, version: API::API.version, personal_access_token: nil, oauth_access_token: nil, job_token: nil, access_token: nil)
full_path = "/api/#{version}#{path}"
if oauth_access_token
- query_string = "access_token=#{oauth_access_token.token}"
+ query_string = "access_token=#{oauth_access_token.plaintext_token}"
elsif personal_access_token
query_string = "private_token=#{personal_access_token.token}"
elsif job_token
query_string = "job_token=#{job_token}"
+ elsif access_token
+ query_string = "access_token=#{access_token.token}"
elsif user
personal_access_token = create(:personal_access_token, user: user)
query_string = "private_token=#{personal_access_token.token}"
@@ -66,6 +68,13 @@ module ApiHelpers
expect(json_response.map { |item| item['id'] }).to contain_exactly(*items)
end
+ def expect_paginated_array_response_contain_exactly(*items)
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to include_pagination_headers
+ expect(json_response).to be_an Array
+ expect(json_response.map { |item| item['id'] }).to contain_exactly(*items)
+ end
+
def stub_last_activity_update
allow_any_instance_of(Users::ActivityService).to receive(:execute)
end
diff --git a/spec/support/helpers/ci/template_helpers.rb b/spec/support/helpers/ci/template_helpers.rb
index 598a5a0becc..119f8d001a1 100644
--- a/spec/support/helpers/ci/template_helpers.rb
+++ b/spec/support/helpers/ci/template_helpers.rb
@@ -5,6 +5,10 @@ module Ci
def secure_analyzers_prefix
'registry.gitlab.com/security-products'
end
+
+ def template_registry_host
+ 'registry.gitlab.com'
+ end
end
end
diff --git a/spec/support/helpers/cycle_analytics_helpers.rb b/spec/support/helpers/cycle_analytics_helpers.rb
index 044ec56b1cc..05e9a099a2b 100644
--- a/spec/support/helpers/cycle_analytics_helpers.rb
+++ b/spec/support/helpers/cycle_analytics_helpers.rb
@@ -1,8 +1,6 @@
# frozen_string_literal: true
module CycleAnalyticsHelpers
- include GitHelpers
-
def toggle_value_stream_dropdown
page.find('[data-testid="dropdown-value-streams"]').click
end
@@ -129,10 +127,6 @@ module CycleAnalyticsHelpers
repository = project.repository
oldrev = repository.commit(branch_name)&.sha || Gitlab::Git::BLANK_SHA
- if Timecop.frozen?
- mock_gitaly_multi_action_dates(repository, commit_time)
- end
-
commit_shas = Array.new(count) do |index|
commit_sha = repository.create_file(user, generate(:branch), "content", message: message, branch_name: branch_name)
repository.commit(commit_sha)
@@ -241,23 +235,4 @@ module CycleAnalyticsHelpers
pipeline: dummy_pipeline(project),
protected: false)
end
-
- def mock_gitaly_multi_action_dates(repository, commit_time)
- allow(repository.raw).to receive(:multi_action).and_wrap_original do |m, user, kargs|
- new_date = commit_time || Time.now
- branch_update = m.call(user, **kargs)
-
- if branch_update.newrev
- commit = rugged_repo(repository).rev_parse(branch_update.newrev)
-
- branch_update.newrev = commit.amend(
- update_ref: "#{Gitlab::Git::BRANCH_REF_PREFIX}#{kargs[:branch_name]}",
- author: commit.author.merge(time: new_date),
- committer: commit.committer.merge(time: new_date)
- )
- end
-
- branch_update
- end
- end
end
diff --git a/spec/support/helpers/dns_helpers.rb b/spec/support/helpers/dns_helpers.rb
index b941e7c4808..c60c14f10a3 100644
--- a/spec/support/helpers/dns_helpers.rb
+++ b/spec/support/helpers/dns_helpers.rb
@@ -5,6 +5,7 @@ module DnsHelpers
stub_all_dns!
stub_invalid_dns!
permit_local_dns!
+ permit_postgresql!
end
def permit_dns!
@@ -25,14 +26,30 @@ module DnsHelpers
def permit_local_dns!
local_addresses = %r{
\A
- ::1? | # IPV6
- (127|10)\.0\.0\.\d{1,3} | # 127.0.0.x or 10.0.0.x local network
- (192\.168|172\.16)\.\d{1,3}\.\d{1,3} | # 192.168.x.x or 172.16.x.x local network
- 0\.0\.0\.0 | # loopback
+ ::1? | # IPV6
+ (127|10)\.0\.0\.\d{1,3} | # 127.0.0.x or 10.0.0.x local network
+ 192\.168\.\d{1,3}\.\d{1,3} | # 192.168.x.x local network
+ 172\.(1[6-9]|2[0-9]|3[0-1])\.\d{1,3}\.\d{1,3} | # 172.16.x.x - 172.31.x.x local network
+ 0\.0\.0\.0 | # loopback
localhost
\z
}xi
allow(Addrinfo).to receive(:getaddrinfo).with(local_addresses, anything, nil, :STREAM).and_call_original
allow(Addrinfo).to receive(:getaddrinfo).with(local_addresses, anything, nil, :STREAM, anything, anything, any_args).and_call_original
end
+
+  # pg v1.4.0, unlike v1.3.5, uses Addrinfo.getaddrinfo to resolve IPv4 and IPv6 addresses:
+ # https://github.com/ged/ruby-pg/pull/459
+ def permit_postgresql!
+ db_hosts.each do |host|
+ next if host.start_with?('/') # Exclude UNIX sockets
+
+ # https://github.com/ged/ruby-pg/blob/252512608a814de16bbad55911f9bbcef0e73cb9/lib/pg/connection.rb#L720
+ allow(Addrinfo).to receive(:getaddrinfo).with(host, anything, nil, :STREAM).and_call_original
+ end
+ end
+
+ def db_hosts
+ ActiveRecord::Base.configurations.configs_for(env_name: Rails.env).map(&:host).compact.uniq
+ end
end
diff --git a/spec/support/helpers/features/blob_spec_helpers.rb b/spec/support/helpers/features/blob_spec_helpers.rb
index 880a7249284..7ccfc9be7e2 100644
--- a/spec/support/helpers/features/blob_spec_helpers.rb
+++ b/spec/support/helpers/features/blob_spec_helpers.rb
@@ -11,12 +11,4 @@ module BlobSpecHelpers
def unset_default_button
set_default_button('')
end
-
- def editor_value
- evaluate_script('monaco.editor.getModels()[0].getValue()')
- end
-
- def set_editor_value(value)
- execute_script("monaco.editor.getModels()[0].setValue('#{value}')")
- end
end
diff --git a/spec/support/helpers/features/invite_members_modal_helper.rb b/spec/support/helpers/features/invite_members_modal_helper.rb
index b56ac5b32c6..d02ec06d886 100644
--- a/spec/support/helpers/features/invite_members_modal_helper.rb
+++ b/spec/support/helpers/features/invite_members_modal_helper.rb
@@ -11,7 +11,7 @@ module Spec
page.within invite_modal_selector do
select_members(names)
choose_options(role, expires_at)
- click_button 'Invite'
+ submit_invites
end
page.refresh if refresh
@@ -42,11 +42,15 @@ module Spec
click_button name
choose_options(role, expires_at)
- click_button 'Invite'
+ submit_invites
page.refresh
end
+ def submit_invites
+ click_button 'Invite'
+ end
+
def choose_options(role, expires_at)
unless role == 'Guest'
click_button 'Guest'
@@ -86,12 +90,47 @@ module Spec
"[data-token-id='#{id}']"
end
+ def more_invite_errors_button_selector
+ "[data-testid='accordion-button']"
+ end
+
+ def limited_invite_error_selector
+ "[data-testid='errors-limited-item']"
+ end
+
+ def expanded_invite_error_selector
+ "[data-testid='errors-expanded-item']"
+ end
+
def remove_token(id)
page.within member_token_selector(id) do
find('[data-testid="close-icon"]').click
end
end
+ def expect_to_have_successful_invite_indicator(page, user)
+ expect(page).to have_selector("#{member_token_selector(user.id)} .gl-bg-green-100")
+ expect(page).not_to have_text("#{user.name}: ")
+ end
+
+ def expect_to_have_invalid_invite_indicator(page, user, message: true)
+ expect(page).to have_selector("#{member_token_selector(user.id)} .gl-bg-red-100")
+ expect(page).to have_selector(member_token_error_selector(user.id))
+ expect(page).to have_text("#{user.name}: Access level should be greater than or equal to") if message
+ end
+
+ def expect_to_have_normal_invite_indicator(page, user)
+ expect(page).to have_selector(member_token_selector(user.id))
+ expect(page).not_to have_selector("#{member_token_selector(user.id)} .gl-bg-red-100")
+ expect(page).not_to have_selector("#{member_token_selector(user.id)} .gl-bg-green-100")
+ expect(page).not_to have_text("#{user.name}: ")
+ end
+
+ def expect_to_have_invite_removed(page, user)
+ expect(page).not_to have_selector(member_token_selector(user.id))
+ expect(page).not_to have_text("#{user.name}: Access level should be greater than or equal to")
+ end
+
def expect_to_have_group(group)
expect(page).to have_selector("[entity-id='#{group.id}']")
end
diff --git a/spec/support/helpers/features/runner_helpers.rb b/spec/support/helpers/features/runners_helpers.rb
index 63fc628358c..63fc628358c 100644
--- a/spec/support/helpers/features/runner_helpers.rb
+++ b/spec/support/helpers/features/runners_helpers.rb
diff --git a/spec/support/helpers/features/source_editor_spec_helpers.rb b/spec/support/helpers/features/source_editor_spec_helpers.rb
index cdc59f9cbe1..f7eb2a52507 100644
--- a/spec/support/helpers/features/source_editor_spec_helpers.rb
+++ b/spec/support/helpers/features/source_editor_spec_helpers.rb
@@ -12,8 +12,11 @@ module Spec
def editor_set_value(value)
editor = find('.monaco-editor')
uri = editor['data-uri']
-          execute_script("monaco.editor.getModel('#{uri}').setValue('#{escape_javascript(value)}')")
+          execute_script("localMonaco.getModel('#{uri}').setValue('#{escape_javascript(value)}')")
+ # We only check that the first line is present because when the content is long,
+ # only a part of the text will be rendered in the DOM due to scrolling
+ page.has_selector?('.gl-source-editor .view-lines', text: value.lines.first)
end
end
end
diff --git a/spec/support/helpers/gitaly_setup.rb b/spec/support/helpers/gitaly_setup.rb
index 56993fc27b7..278dc79e1d0 100644
--- a/spec/support/helpers/gitaly_setup.rb
+++ b/spec/support/helpers/gitaly_setup.rb
@@ -12,6 +12,8 @@ require 'logger'
require 'fileutils'
require 'bundler'
+require_relative '../../../lib/gitlab/utils'
+
module GitalySetup
extend self
@@ -139,7 +141,7 @@ module GitalySetup
end
def start_praefect
- if ENV['GITALY_PRAEFECT_WITH_DB']
+ if praefect_with_db?
LOGGER.debug 'Starting Praefect with database election strategy'
start(:praefect, File.join(tmp_tests_gitaly_dir, 'praefect-db.config.toml'))
else
@@ -290,7 +292,7 @@ module GitalySetup
# In CI we need to pre-generate both config files.
# For local testing we'll create the correct file on-demand.
- if ENV['CI'] || ENV['GITALY_PRAEFECT_WITH_DB'].nil?
+ if ENV['CI'] || !praefect_with_db?
Gitlab::SetupHelper::Praefect.create_configuration(
gitaly_dir,
{ 'praefect' => repos_path },
@@ -298,7 +300,7 @@ module GitalySetup
)
end
- if ENV['CI'] || ENV['GITALY_PRAEFECT_WITH_DB']
+ if ENV['CI'] || praefect_with_db?
Gitlab::SetupHelper::Praefect.create_configuration(
gitaly_dir,
{ 'praefect' => repos_path },
@@ -319,7 +321,7 @@ module GitalySetup
end
def setup_praefect
- return unless ENV['GITALY_PRAEFECT_WITH_DB']
+ return unless praefect_with_db?
migrate_cmd = service_cmd(:praefect, File.join(tmp_tests_gitaly_dir, 'praefect-db.config.toml')) + ['sql-migrate']
system(env, *migrate_cmd, [:out, :err] => 'log/praefect-test.log')
@@ -396,4 +398,8 @@ module GitalySetup
def praefect_binary
File.join(tmp_tests_gitaly_dir, "_build", "bin", "praefect")
end
+
+ def praefect_with_db?
+ Gitlab::Utils.to_boolean(ENV['GITALY_PRAEFECT_WITH_DB'], default: false)
+ end
end
diff --git a/spec/support/helpers/global_id_deprecation_helpers.rb b/spec/support/helpers/global_id_deprecation_helpers.rb
index 37ba1420fb3..5c6862ca84a 100644
--- a/spec/support/helpers/global_id_deprecation_helpers.rb
+++ b/spec/support/helpers/global_id_deprecation_helpers.rb
@@ -2,9 +2,11 @@
module GlobalIDDeprecationHelpers
def stub_global_id_deprecations(*deprecations)
- old_name_map = deprecations.index_by(&:old_model_name)
- new_name_map = deprecations.index_by(&:new_model_name)
- old_graphql_name_map = deprecations.index_by { |d| Types::GlobalIDType.model_name_to_graphql_name(d.old_model_name) }
+ old_name_map = deprecations.index_by(&:old_name)
+ new_name_map = deprecations.index_by(&:new_name)
+ old_graphql_name_map = deprecations.index_by do |d|
+ Gitlab::GlobalId::Deprecations.map_graphql_name(d.old_name)
+ end
stub_const('Gitlab::GlobalId::Deprecations::OLD_NAME_MAP', old_name_map)
stub_const('Gitlab::GlobalId::Deprecations::NEW_NAME_MAP', new_name_map)
diff --git a/spec/support/helpers/graphql_helpers.rb b/spec/support/helpers/graphql_helpers.rb
index d0a1941817a..d78c523decd 100644
--- a/spec/support/helpers/graphql_helpers.rb
+++ b/spec/support/helpers/graphql_helpers.rb
@@ -170,7 +170,7 @@ module GraphqlHelpers
# or `prepare` in app/graphql/types/range_input_type.rb, used by Types::TimeframeInputType
def args_internal(field, args:, query_ctx:, parent:, extras:, query:)
arguments = GraphqlHelpers.deep_transform_args(args, field)
- arguments.merge!(extras.reject {|k, v| v == :not_given})
+ arguments.merge!(extras.reject { |k, v| v == :not_given })
end
# Pros:
@@ -185,7 +185,7 @@ module GraphqlHelpers
# take internal style args, and force them into client style args
def args_internal_prepared(field, args:, query_ctx:, parent:, extras:, query:)
arguments = GraphqlHelpers.as_graphql_argument_literals(args)
- arguments.merge!(extras.reject {|k, v| v == :not_given})
+ arguments.merge!(extras.reject { |k, v| v == :not_given })
# Use public API to properly prepare the args for use by the resolver.
# It uses `coerce_arguments` under the covers
@@ -307,14 +307,14 @@ module GraphqlHelpers
end
def graphql_mutation(name, input, fields = nil, &block)
- raise ArgumentError, 'Please pass either `fields` parameter or a block to `#graphql_mutation`, but not both.' if fields.present? && block_given?
+ raise ArgumentError, 'Please pass either `fields` parameter or a block to `#graphql_mutation`, but not both.' if fields.present? && block
name = name.graphql_name if name.respond_to?(:graphql_name)
mutation_name = GraphqlHelpers.fieldnamerize(name)
input_variable_name = "$#{input_variable_name_for_mutation(name)}"
mutation_field = GitlabSchema.mutation.fields[mutation_name]
- fields = yield if block_given?
+ fields = yield if block
fields ||= all_graphql_fields_for(mutation_field.type.to_type_signature)
query = <<~MUTATION
diff --git a/spec/support/helpers/javascript_fixtures_helpers.rb b/spec/support/helpers/javascript_fixtures_helpers.rb
index 84cd0181533..32e6e8d50bd 100644
--- a/spec/support/helpers/javascript_fixtures_helpers.rb
+++ b/spec/support/helpers/javascript_fixtures_helpers.rb
@@ -2,6 +2,7 @@
require 'action_dispatch/testing/test_request'
require 'fileutils'
+require 'graphlyte'
require_relative '../../../lib/gitlab/popen'
@@ -47,7 +48,8 @@ module JavaScriptFixturesHelpers
path = Rails.root / base / query_path
queries = Gitlab::Graphql::Queries.find(path)
if queries.length == 1
- queries.first.text(mode: Gitlab.ee? ? :ee : :ce )
+ query = queries.first.text(mode: Gitlab.ee? ? :ee : :ce )
+ inflate_query_with_typenames(query)
else
raise "Could not find query file at #{path}, please check your query_path" % path
end
@@ -55,6 +57,23 @@ module JavaScriptFixturesHelpers
private
+ # Private: Parse a GraphQL query and inflate the fields with a __typename
+ #
+  # query - the GraphQL query to parse
+ def inflate_query_with_typenames(query, doc: Graphlyte.parse(query))
+ typename_editor.edit(doc)
+
+ doc.to_s
+ end
+
+ def typename_editor
+ typename = Graphlyte::Syntax::Field.new(name: '__typename')
+
+ @editor ||= Graphlyte::Editor.new.on_field do |field|
+ field.selection << typename unless field.selection.empty? || field.selection.map(&:name).include?('__typename')
+ end
+ end
+
# Private: Store a response object as fixture file
#
# response - string or response object to store
diff --git a/spec/support/helpers/lfs_http_helpers.rb b/spec/support/helpers/lfs_http_helpers.rb
index 199d5e70e32..91ed56b4d13 100644
--- a/spec/support/helpers/lfs_http_helpers.rb
+++ b/spec/support/helpers/lfs_http_helpers.rb
@@ -52,11 +52,9 @@ module LfsHttpHelpers
end
def request_body(operation, objects)
- objects = [objects] unless objects.is_a?(Array)
-
{
'operation' => operation,
- 'objects' => objects
+ 'objects' => Array.wrap(objects)
}
end
end
diff --git a/spec/support/helpers/login_helpers.rb b/spec/support/helpers/login_helpers.rb
index c93ef8b0ead..f83f5c7bfde 100644
--- a/spec/support/helpers/login_helpers.rb
+++ b/spec/support/helpers/login_helpers.rb
@@ -91,12 +91,12 @@ module LoginHelpers
# user - User instance to login with
# remember - Whether or not to check "Remember me" (default: false)
# two_factor_auth - If two-factor authentication is enabled (default: false)
- # password - password to attempt to login with
+ # password - password to attempt to login with (default: user.password)
def gitlab_sign_in_with(user, remember: false, two_factor_auth: false, password: nil)
visit new_user_session_path
fill_in "user_login", with: user.email
- fill_in "user_password", with: (password || "12345678")
+ fill_in "user_password", with: (password || user.password)
check 'user_remember_me' if remember
find('[data-testid="sign-in-button"]:enabled').click
diff --git a/spec/support/helpers/query_recorder.rb b/spec/support/helpers/query_recorder.rb
index 01839a74e65..dd124ed9c7f 100644
--- a/spec/support/helpers/query_recorder.rb
+++ b/spec/support/helpers/query_recorder.rb
@@ -14,7 +14,7 @@ module ActiveRecord
@skip_schema_queries = skip_schema_queries
@query_recorder_debug = ENV['QUERY_RECORDER_DEBUG'] || query_recorder_debug
@log_file = log_file
- record(&block) if block_given?
+ record(&block) if block
end
def record(&block)
diff --git a/spec/support/helpers/rack_attack_spec_helpers.rb b/spec/support/helpers/rack_attack_spec_helpers.rb
index 6c06781df03..2502889e17c 100644
--- a/spec/support/helpers/rack_attack_spec_helpers.rb
+++ b/spec/support/helpers/rack_attack_spec_helpers.rb
@@ -17,8 +17,12 @@ module RackAttackSpecHelpers
{ Gitlab::Auth::AuthFinders::PRIVATE_TOKEN_HEADER => personal_access_token.token }
end
+ def bearer_headers(token)
+ { 'AUTHORIZATION' => "Bearer #{token.token}" }
+ end
+
def oauth_token_headers(oauth_access_token)
- { 'AUTHORIZATION' => "Bearer #{oauth_access_token.token}" }
+ { 'AUTHORIZATION' => "Bearer #{oauth_access_token.plaintext_token}" }
end
def basic_auth_headers(user, personal_access_token)
diff --git a/spec/support/helpers/redis_commands/recorder.rb b/spec/support/helpers/redis_commands/recorder.rb
new file mode 100644
index 00000000000..05a1aa67853
--- /dev/null
+++ b/spec/support/helpers/redis_commands/recorder.rb
@@ -0,0 +1,34 @@
+# frozen_string_literal: true
+
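+# Test helper that records the Redis commands issued inside a block by
+# subscribing to the 'redis.process_commands' ActiveSupport::Notifications
+# event emitted by the Redis::Client instrumentation patch in spec_helper.rb.
+# An optional pattern limits the log to commands whose text includes it.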
+module RedisCommands
+ class Recorder
+ def initialize(pattern: nil, &block)
+ @log = []
+ @pattern = pattern
+
+ record(&block) if block
+ end
+
+ attr_reader :log
+
+ def record(&block)
+ ActiveSupport::Notifications.subscribed(method(:callback), 'redis.process_commands', &block)
+ end
+
+ def by_command(command)
+ @log.select { |record| record.include?(command) }
+ end
+
+ def count
+ @count ||= @log.count
+ end
+
+ private
+
+ def callback(name, start, finish, message_id, values)
+ commands = values[:commands]
+
+ @log << commands.flatten if @pattern.nil? || commands.to_s.include?(@pattern)
+ end
+ end
+end
diff --git a/spec/support/helpers/runner_releases_helper.rb b/spec/support/helpers/runner_releases_helper.rb
new file mode 100644
index 00000000000..ab16a705425
--- /dev/null
+++ b/spec/support/helpers/runner_releases_helper.rb
@@ -0,0 +1,22 @@
+# frozen_string_literal: true
+
+module RunnerReleasesHelper
+ def stub_runner_releases(available_runner_releases, gitlab_version: nil)
+ # We stub the behavior of RunnerReleases so that we don't need to rely on flaky global settings
+ available_runner_releases = available_runner_releases
+ .map { |v| ::Gitlab::VersionInfo.parse(v, parse_suffix: true) }
+ .sort
+ releases_by_minor = available_runner_releases
+ .group_by(&:without_patch)
+ .transform_values(&:max)
+
+ runner_releases_double = instance_double(Gitlab::Ci::RunnerReleases)
+ allow(::Gitlab::Ci::RunnerUpgradeCheck).to receive(:new).and_wrap_original do |method, *_original_args|
+ gitlab_version ||= available_runner_releases.max
+ method.call(gitlab_version, runner_releases_double)
+ end
+
+ allow(runner_releases_double).to receive(:releases).and_return(available_runner_releases)
+ allow(runner_releases_double).to receive(:releases_by_minor).and_return(releases_by_minor)
+ end
+end
diff --git a/spec/support/helpers/stub_member.rb b/spec/support/helpers/stub_member.rb
new file mode 100644
index 00000000000..bcd0b675041
--- /dev/null
+++ b/spec/support/helpers/stub_member.rb
@@ -0,0 +1,8 @@
+# frozen_string_literal: true
+
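+# Prepends the StubbedMember modules (see stubbed_member.rb) so that project
+# authorization refreshes triggered by membership changes run inline in specs
+# instead of going through Sidekiq.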
+module StubMember
+ def self.included(base)
+ Member.prepend(StubbedMember::Member)
+ ProjectMember.prepend(StubbedMember::ProjectMember)
+ end
+end
diff --git a/spec/support/helpers/stub_method_calls.rb b/spec/support/helpers/stub_method_calls.rb
index 45d704958ca..ccbede16563 100644
--- a/spec/support/helpers/stub_method_calls.rb
+++ b/spec/support/helpers/stub_method_calls.rb
@@ -44,7 +44,7 @@ module StubMethodCalls
end
def self.stub_method(object, method, &block)
- raise ArgumentError, "Block is required" unless block_given?
+ raise ArgumentError, "Block is required" unless block
backup_method(object, method) unless backed_up_method?(object, method)
object.define_singleton_method(method, &block)
diff --git a/spec/support/helpers/stubbed_member.rb b/spec/support/helpers/stubbed_member.rb
new file mode 100644
index 00000000000..27420c9b709
--- /dev/null
+++ b/spec/support/helpers/stubbed_member.rb
@@ -0,0 +1,28 @@
+# frozen_string_literal: true
+
+# Extend the ProjectMember & GroupMember classes with the ability
+# to run project_authorizations refresh jobs inline.
+
+# This is needed so that calls like `group.add_member(user, access_level)` or `create(:project_member)`
+# in the specs can be run without including the `:sidekiq_inline` trait.
+module StubbedMember
+ extend ActiveSupport::Concern
+
+ module Member
+ private
+
+ def refresh_member_authorized_projects(blocking:)
+ return super unless blocking
+
+ AuthorizedProjectsWorker.new.perform(user_id)
+ end
+ end
+
+ module ProjectMember
+ private
+
+ def blocking_project_authorizations_refresh
+ AuthorizedProjectUpdate::ProjectRecalculatePerUserWorker.new.perform(project.id, user.id)
+ end
+ end
+end
diff --git a/spec/support/helpers/type_name_deprecation_helpers.rb b/spec/support/helpers/type_name_deprecation_helpers.rb
new file mode 100644
index 00000000000..591737ab532
--- /dev/null
+++ b/spec/support/helpers/type_name_deprecation_helpers.rb
@@ -0,0 +1,15 @@
+# frozen_string_literal: true
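+# Replaces the Gitlab::Graphql::TypeNameDeprecations lookup maps with the
+# given deprecations so specs can exercise deprecated type names in isolation.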
+
+module TypeNameDeprecationHelpers
+ def stub_type_name_deprecations(*deprecations)
+ old_name_map = deprecations.index_by(&:old_name)
+ new_name_map = deprecations.index_by(&:new_name)
+ old_graphql_name_map = deprecations.index_by do |d|
+ Gitlab::Graphql::TypeNameDeprecations.map_graphql_name(d.old_name)
+ end
+
+ stub_const('Gitlab::Graphql::TypeNameDeprecations::OLD_NAME_MAP', old_name_map)
+ stub_const('Gitlab::Graphql::TypeNameDeprecations::NEW_NAME_MAP', new_name_map)
+ stub_const('Gitlab::Graphql::TypeNameDeprecations::OLD_GRAPHQL_NAME_MAP', old_graphql_name_map)
+ end
+end
diff --git a/spec/support/matchers/event_store.rb b/spec/support/matchers/event_store.rb
index 14f6a42d7f4..4ecb924b3ed 100644
--- a/spec/support/matchers/event_store.rb
+++ b/spec/support/matchers/event_store.rb
@@ -23,8 +23,8 @@ RSpec::Matchers.define :publish_event do |expected_event_class|
def match_data?(actual, expected)
values_match?(actual.keys, expected.keys) &&
- actual.keys.each do |key|
- values_match?(actual[key], expected[key])
+ actual.keys.all? do |key|
+ values_match?(expected[key], actual[key])
end
end
diff --git a/spec/support/matchers/markdown_matchers.rb b/spec/support/matchers/markdown_matchers.rb
index 1932f78506f..8bec3be2535 100644
--- a/spec/support/matchers/markdown_matchers.rb
+++ b/spec/support/matchers/markdown_matchers.rb
@@ -189,8 +189,10 @@ module MarkdownMatchers
match do |actual|
expect(actual).to have_selector('ul.task-list', count: 2)
- expect(actual).to have_selector('li.task-list-item', count: 7)
+ expect(actual).to have_selector('li.task-list-item', count: 9)
+ expect(actual).to have_selector('li.task-list-item.inapplicable > s', count: 2)
expect(actual).to have_selector('input[checked]', count: 3)
+ expect(actual).to have_selector('input[data-inapplicable]', count: 2)
end
end
diff --git a/spec/support/shared_contexts/bulk_imports_requests_shared_context.rb b/spec/support/shared_contexts/bulk_imports_requests_shared_context.rb
index 62d708420c3..5fcb14e075a 100644
--- a/spec/support/shared_contexts/bulk_imports_requests_shared_context.rb
+++ b/spec/support/shared_contexts/bulk_imports_requests_shared_context.rb
@@ -12,17 +12,17 @@ RSpec.shared_context 'bulk imports requests context' do |url|
}
end
- let(:request_headers) { { 'Authorization' => 'Bearer demo-pat', 'Content-Type' => 'application/json' } }
+ let(:request_headers) { { 'Content-Type' => 'application/json' } }
before do
- stub_request(:get, "#{url}/api/v4/version")
+ stub_request(:get, "#{url}/api/v4/version?page=1&per_page=20&private_token=demo-pat")
.with(headers: request_headers)
.to_return(
status: 200,
body: { version: ::BulkImport.min_gl_version_for_project_migration.to_s }.to_json,
headers: { 'Content-Type' => 'application/json' })
- stub_request(:get, "https://gitlab.example.com/api/v4/groups?min_access_level=50&page=1&per_page=20&search=test&top_level_only=true")
+ stub_request(:get, "https://gitlab.example.com/api/v4/groups?min_access_level=50&page=1&per_page=20&private_token=demo-pat&search=test&top_level_only=true")
.with(headers: request_headers)
.to_return(status: 200,
body: [{
@@ -33,10 +33,9 @@ RSpec.shared_context 'bulk imports requests context' do |url|
full_name: 'Test',
full_path: 'stub-test-group'
}].to_json,
- headers: page_response_headers
- )
+ headers: page_response_headers)
- stub_request(:get, "%{url}/api/v4/groups?page=1&per_page=20&top_level_only=true&min_access_level=50&search=" % { url: url })
+ stub_request(:get, "%{url}/api/v4/groups?min_access_level=50&page=1&per_page=20&private_token=demo-pat&search=&top_level_only=true" % { url: url })
.to_return(
body: [{
id: 2595438,
@@ -46,7 +45,6 @@ RSpec.shared_context 'bulk imports requests context' do |url|
full_name: 'Stub',
full_path: 'stub-group'
}].to_json,
- headers: page_response_headers
- )
+ headers: page_response_headers)
end
end
diff --git a/spec/support/shared_contexts/features/integrations/integrations_shared_context.rb b/spec/support/shared_contexts/features/integrations/integrations_shared_context.rb
index 255c4e6f882..ca2fe8a6c54 100644
--- a/spec/support/shared_contexts/features/integrations/integrations_shared_context.rb
+++ b/spec/support/shared_contexts/features/integrations/integrations_shared_context.rb
@@ -66,7 +66,7 @@ Integration.available_integration_names.each do |integration|
hash.merge!(k => 'foo@bar.com')
elsif (integration == 'slack' || integration == 'mattermost') && k == :labels_to_be_notified_behavior
hash.merge!(k => "match_any")
- elsif integration == 'campfire' && k = :room
+ elsif integration == 'campfire' && k == :room
hash.merge!(k => '1234')
else
hash.merge!(k => "someword")
diff --git a/spec/support/shared_contexts/fixtures/analytics_shared_context.rb b/spec/support/shared_contexts/fixtures/analytics_shared_context.rb
index 13d3697a378..8e09cccee3e 100644
--- a/spec/support/shared_contexts/fixtures/analytics_shared_context.rb
+++ b/spec/support/shared_contexts/fixtures/analytics_shared_context.rb
@@ -1,6 +1,7 @@
# frozen_string_literal: true
RSpec.shared_context 'Analytics fixtures shared context' do
+ include CycleAnalyticsHelpers
include JavaScriptFixturesHelpers
let_it_be(:group) { create(:group) }
diff --git a/spec/support/shared_contexts/lib/gitlab/sidekiq_middleware/server_metrics_shared_context.rb b/spec/support/shared_contexts/lib/gitlab/sidekiq_middleware/server_metrics_shared_context.rb
index 449db59e35d..b6c54e902a2 100644
--- a/spec/support/shared_contexts/lib/gitlab/sidekiq_middleware/server_metrics_shared_context.rb
+++ b/spec/support/shared_contexts/lib/gitlab/sidekiq_middleware/server_metrics_shared_context.rb
@@ -17,6 +17,7 @@ RSpec.shared_context 'server metrics with mocked prometheus' do
let(:elasticsearch_seconds_metric) { double('elasticsearch seconds metric') }
let(:elasticsearch_requests_total) { double('elasticsearch calls total metric') }
let(:load_balancing_metric) { double('load balancing metric') }
+ let(:sidekiq_mem_total_bytes) { double('sidekiq mem total bytes') }
before do
allow(Gitlab::Metrics).to receive(:histogram).and_call_original
@@ -37,6 +38,7 @@ RSpec.shared_context 'server metrics with mocked prometheus' do
allow(Gitlab::Metrics).to receive(:counter).with(:sidekiq_load_balancing_count, anything).and_return(load_balancing_metric)
allow(Gitlab::Metrics).to receive(:gauge).with(:sidekiq_running_jobs, anything, {}, :all).and_return(running_jobs_metric)
allow(Gitlab::Metrics).to receive(:gauge).with(:sidekiq_concurrency, anything, {}, :all).and_return(concurrency_metric)
+ allow(Gitlab::Metrics).to receive(:gauge).with(:sidekiq_mem_total_bytes, anything, {}, :all).and_return(sidekiq_mem_total_bytes)
allow(concurrency_metric).to receive(:set)
end
@@ -61,13 +63,16 @@ RSpec.shared_context 'server metrics call' do
let(:elasticsearch_calls) { 8 }
let(:elasticsearch_duration) { 0.54 }
+
+ let(:mem_total_bytes) { 1_000_000_000 }
let(:instrumentation) do
{
gitaly_duration_s: gitaly_duration,
redis_calls: redis_calls,
redis_duration_s: redis_duration,
elasticsearch_calls: elasticsearch_calls,
- elasticsearch_duration_s: elasticsearch_duration
+ elasticsearch_duration_s: elasticsearch_duration,
+ mem_total_bytes: mem_total_bytes
}
end
@@ -95,5 +100,6 @@ RSpec.shared_context 'server metrics call' do
allow(completion_seconds_metric).to receive(:observe)
allow(redis_seconds_metric).to receive(:observe)
allow(elasticsearch_seconds_metric).to receive(:observe)
+ allow(sidekiq_mem_total_bytes).to receive(:set)
end
end
diff --git a/spec/support/shared_contexts/markdown_snapshot_shared_examples.rb b/spec/support/shared_contexts/markdown_snapshot_shared_examples.rb
index a90fe9e1723..040b2da9f37 100644
--- a/spec/support/shared_contexts/markdown_snapshot_shared_examples.rb
+++ b/spec/support/shared_contexts/markdown_snapshot_shared_examples.rb
@@ -9,6 +9,9 @@ RSpec.shared_context 'with API::Markdown Snapshot shared context' do |glfm_speci
# rubocop:enable Layout/LineLength
include ApiHelpers
+ let_it_be(:user) { create(:user) }
+ let_it_be(:api_url) { api('/markdown', user) }
+
markdown_examples, html_examples = %w[markdown.yml html.yml].map do |file_name|
yaml = File.read("#{glfm_specification_dir}/example_snapshots/#{file_name}")
YAML.safe_load(yaml, symbolize_names: true, aliases: true)
@@ -29,8 +32,6 @@ RSpec.shared_context 'with API::Markdown Snapshot shared context' do |glfm_speci
let(:normalizations) { normalizations_by_example_name.dig(name, :html, :static, :snapshot) }
it "verifies conversion of GLFM to HTML", :unlimited_max_formatted_output_length do
- api_url = api "/markdown"
-
# noinspection RubyResolve
normalized_html = normalize_html(html, normalizations)
diff --git a/spec/support/shared_contexts/policies/group_policy_shared_context.rb b/spec/support/shared_contexts/policies/group_policy_shared_context.rb
index eec6e92c5fe..893d3702407 100644
--- a/spec/support/shared_contexts/policies/group_policy_shared_context.rb
+++ b/spec/support/shared_contexts/policies/group_policy_shared_context.rb
@@ -56,6 +56,7 @@ RSpec.shared_context 'GroupPolicy context' do
admin_package
create_projects
create_cluster update_cluster admin_cluster add_cluster
+ destroy_upload
]
end
diff --git a/spec/support/shared_contexts/policies/project_policy_shared_context.rb b/spec/support/shared_contexts/policies/project_policy_shared_context.rb
index 789b385c435..1d4731d9b39 100644
--- a/spec/support/shared_contexts/policies/project_policy_shared_context.rb
+++ b/spec/support/shared_contexts/policies/project_policy_shared_context.rb
@@ -62,6 +62,7 @@ RSpec.shared_context 'ProjectPolicy context' do
admin_project admin_project_member admin_snippet admin_terraform_state
admin_wiki create_deploy_token destroy_deploy_token
push_to_delete_protected_branch read_deploy_token update_snippet
+ destroy_upload
]
end
diff --git a/spec/support/shared_contexts/policies/project_policy_table_shared_context.rb b/spec/support/shared_contexts/policies/project_policy_table_shared_context.rb
index fbd82fbbe31..b18ce14eba6 100644
--- a/spec/support/shared_contexts/policies/project_policy_table_shared_context.rb
+++ b/spec/support/shared_contexts/policies/project_policy_table_shared_context.rb
@@ -545,5 +545,62 @@ RSpec.shared_context 'ProjectPolicyTable context' do
:private | :non_member | nil | 0
:private | :anonymous | nil | 0
end
+
+ # Based on the permission_table_for_reporter_feature_access table, but for issue
+ # features that, in public and internal projects with issues enabled, are only
+ # accessible to reporters and above (excluding admins when admin mode is disabled)
+ #
+ # :project_level, :feature_access_level, :membership, :admin_mode, :expected_count
+ def permission_table_for_reporter_issue_access
+ :public | :enabled | :admin | true | 1
+ :public | :enabled | :admin | false | 0
+ :public | :enabled | :reporter | nil | 1
+ :public | :enabled | :guest | nil | 0
+ :public | :enabled | :non_member | nil | 0
+ :public | :enabled | :anonymous | nil | 0
+
+ :public | :private | :admin | true | 1
+ :public | :private | :admin | false | 0
+ :public | :private | :reporter | nil | 1
+ :public | :private | :guest | nil | 0
+ :public | :private | :non_member | nil | 0
+ :public | :private | :anonymous | nil | 0
+
+ :public | :disabled | :reporter | nil | 0
+ :public | :disabled | :guest | nil | 0
+ :public | :disabled | :non_member | nil | 0
+ :public | :disabled | :anonymous | nil | 0
+
+ :internal | :enabled | :admin | true | 1
+ :internal | :enabled | :admin | false | 0
+ :internal | :enabled | :reporter | nil | 1
+ :internal | :enabled | :guest | nil | 0
+ :internal | :enabled | :non_member | nil | 0
+ :internal | :enabled | :anonymous | nil | 0
+
+ :internal | :private | :admin | true | 1
+ :internal | :private | :admin | false | 0
+ :internal | :private | :reporter | nil | 1
+ :internal | :private | :guest | nil | 0
+ :internal | :private | :non_member | nil | 0
+ :internal | :private | :anonymous | nil | 0
+
+ :internal | :disabled | :reporter | nil | 0
+ :internal | :disabled | :guest | nil | 0
+ :internal | :disabled | :non_member | nil | 0
+ :internal | :disabled | :anonymous | nil | 0
+
+ :private | :private | :admin | true | 1
+ :private | :private | :admin | false | 0
+ :private | :private | :reporter | nil | 1
+ :private | :private | :guest | nil | 0
+ :private | :private | :non_member | nil | 0
+ :private | :private | :anonymous | nil | 0
+
+ :private | :disabled | :reporter | nil | 0
+ :private | :disabled | :guest | nil | 0
+ :private | :disabled | :non_member | nil | 0
+ :private | :disabled | :anonymous | nil | 0
+ end
# rubocop:enable Metrics/AbcSize
end
diff --git a/spec/support/shared_contexts/upload_type_check_shared_context.rb b/spec/support/shared_contexts/upload_type_check_shared_context.rb
index 5fce31b4a15..57b8d7472df 100644
--- a/spec/support/shared_contexts/upload_type_check_shared_context.rb
+++ b/spec/support/shared_contexts/upload_type_check_shared_context.rb
@@ -3,7 +3,7 @@
# Construct an `uploader` variable that is configured to `check_upload_type`
# with `mime_types` and `extensions`.
# @param uploader [CarrierWave::Uploader::Base] uploader with extension_whitelist method.
-RSpec.shared_context 'ignore extension whitelist check' do
+RSpec.shared_context 'ignore extension allowlist check' do
before do
allow(uploader).to receive(:extension_whitelist).and_return(nil)
end
@@ -16,3 +16,18 @@ RSpec.shared_context 'force content type detection to mime_type' do
allow(Gitlab::Utils::MimeType).to receive(:from_io).and_return(mime_type)
end
end
+
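+# Stubs UploadService so specs can exercise the upload code path without real storage.
+# Assumes the including spec defines `project`, `file`, `upload_service`, `uploader`,
+# `upload` and `upload_id` (for example via `let` and `double`).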
+def mock_upload(success = true)
+ allow(UploadService).to receive(:new).with(project, file).and_return(upload_service)
+
+ if success
+ allow(upload_service).to receive(:execute).and_return(uploader)
+ allow(uploader).to receive(:upload).and_return(upload)
+ allow(upload).to receive(:id).and_return(upload_id)
+ else
+ allow(upload_service).to receive(:execute).and_return(nil)
+ end
+end
diff --git a/spec/support/shared_examples/attention_request_cache_invalidation_examples.rb b/spec/support/shared_examples/attention_request_cache_invalidation_examples.rb
deleted file mode 100644
index 7fe696abc69..00000000000
--- a/spec/support/shared_examples/attention_request_cache_invalidation_examples.rb
+++ /dev/null
@@ -1,15 +0,0 @@
-# frozen_string_literal: true
-
-RSpec.shared_examples 'invalidates attention request cache' do
- it 'invalidates the merge requests requiring attention count' do
- cache_mock = double
-
- users.each do |user|
- expect(cache_mock).to receive(:delete).with(['users', user.id, 'attention_requested_open_merge_requests_count'])
- end
-
- allow(Rails).to receive(:cache).and_return(cache_mock)
-
- service.execute
- end
-end
diff --git a/spec/support/shared_examples/boards/destroy_service_shared_examples.rb b/spec/support/shared_examples/boards/destroy_service_shared_examples.rb
index 33bae3da44b..b1cb58a736f 100644
--- a/spec/support/shared_examples/boards/destroy_service_shared_examples.rb
+++ b/spec/support/shared_examples/boards/destroy_service_shared_examples.rb
@@ -20,10 +20,10 @@ RSpec.shared_examples 'board destroy service' do
end
context 'when there is only one board' do
- it 'does not remove board' do
+ it 'does remove board' do
expect do
- expect(service.execute(board)).to be_error
- end.not_to change(boards, :count)
+ service.execute(board)
+ end.to change(boards, :count).by(-1)
end
end
end
diff --git a/spec/support/shared_examples/components/pajamas_shared_examples.rb b/spec/support/shared_examples/components/pajamas_shared_examples.rb
index 5c0ad1a1bc9..bcf7df24fd9 100644
--- a/spec/support/shared_examples/components/pajamas_shared_examples.rb
+++ b/spec/support/shared_examples/components/pajamas_shared_examples.rb
@@ -2,12 +2,18 @@
RSpec.shared_examples 'it renders help text' do
it 'renders help text' do
- expect(rendered_component).to have_selector('[data-testid="pajamas-component-help-text"]', text: help_text)
+ expect(page).to have_css('[data-testid="pajamas-component-help-text"]', text: help_text)
end
end
RSpec.shared_examples 'it does not render help text' do
it 'does not render help text' do
- expect(rendered_component).not_to have_selector('[data-testid="pajamas-component-help-text"]')
+ expect(page).not_to have_css('[data-testid="pajamas-component-help-text"]')
+ end
+end
+
+RSpec.shared_examples 'it renders unchecked checkbox with value of `1`' do
+ it 'renders unchecked checkbox with value of `1`' do
+ expect(page).to have_unchecked_field(label, with: '1')
end
end
diff --git a/spec/support/shared_examples/controllers/search_cross_project_authorization_shared_examples.rb b/spec/support/shared_examples/controllers/search_cross_project_authorization_shared_examples.rb
new file mode 100644
index 00000000000..9421561aea4
--- /dev/null
+++ b/spec/support/shared_examples/controllers/search_cross_project_authorization_shared_examples.rb
@@ -0,0 +1,20 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples_for 'when the user cannot read cross project' do |action, params|
+ before do
+ allow(Ability).to receive(:allowed?).and_call_original
+ allow(Ability).to receive(:allowed?).with(user, :read_cross_project, :global).and_return(false)
+ end
+
+ it 'blocks access without a project_id' do
+ get action, params: params
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+
+ it 'allows access with a project_id' do
+ get action, params: params.merge(project_id: create(:project, :public).id)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+end
diff --git a/spec/support/shared_examples/controllers/search_external_authorization_service_shared_examples.rb b/spec/support/shared_examples/controllers/search_external_authorization_service_shared_examples.rb
new file mode 100644
index 00000000000..6b72988b3e6
--- /dev/null
+++ b/spec/support/shared_examples/controllers/search_external_authorization_service_shared_examples.rb
@@ -0,0 +1,24 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples_for 'with external authorization service enabled' do |action, params|
+ include ExternalAuthorizationServiceHelpers
+
+ let(:project) { create(:project, namespace: user.namespace) }
+ let(:note) { create(:note_on_issue, project: project) }
+
+ before do
+ enable_external_authorization_service_check
+ end
+
+ it 'renders a 403 when no project is given' do
+ get action, params: params
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+
+ it 'renders a 200 when a project was set' do
+ get action, params: params.merge(project_id: project.id)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+end
diff --git a/spec/support/shared_examples/controllers/snowplow_event_tracking_examples.rb b/spec/support/shared_examples/controllers/snowplow_event_tracking_examples.rb
index 98fc52add51..2e691d1b36f 100644
--- a/spec/support/shared_examples/controllers/snowplow_event_tracking_examples.rb
+++ b/spec/support/shared_examples/controllers/snowplow_event_tracking_examples.rb
@@ -2,22 +2,26 @@
#
# Requires a context containing:
# - subject
-# - project
# - feature_flag_name
# - category
# - action
# - namespace
+# Optionally, the context can contain:
+# - project
+# - property
# - user
+# - label
+# - **extra
-shared_examples 'Snowplow event tracking' do
- let(:label) { nil }
+shared_examples 'Snowplow event tracking' do |overrides: {}|
+ let(:extra) { {} }
it 'is not emitted if FF is disabled' do
stub_feature_flags(feature_flag_name => false)
subject
- expect_no_snowplow_event
+ expect_no_snowplow_event(category: category, action: action)
end
it 'is emitted' do
@@ -25,10 +29,11 @@ shared_examples 'Snowplow event tracking' do
category: category,
action: action,
namespace: namespace,
- user: user,
- project: project,
- label: label
- }.compact
+ user: try(:user),
+ project: try(:project),
+ label: try(:label),
+ property: try(:property)
+ }.merge(overrides).compact.merge(extra)
subject
diff --git a/spec/support/shared_examples/controllers/uploads_actions_shared_examples.rb b/spec/support/shared_examples/controllers/uploads_actions_shared_examples.rb
index 6dca94ecf0a..0792ac14e47 100644
--- a/spec/support/shared_examples/controllers/uploads_actions_shared_examples.rb
+++ b/spec/support/shared_examples/controllers/uploads_actions_shared_examples.rb
@@ -205,41 +205,13 @@ RSpec.shared_examples 'handle uploads' do
allow_any_instance_of(FileUploader).to receive(:image?).and_return(true)
end
- context "enforce_auth_checks_on_uploads feature flag" do
- context "with flag enabled" do
- before do
- stub_feature_flags(enforce_auth_checks_on_uploads: true)
- end
+ it "responds with the appropriate status code" do
+ show_upload
- it "responds with appropriate status" do
- show_upload
-
- # We're switching here based on the class due to the feature
- # flag :enforce_auth_checks_on_uploads switching on project.
- # When it is enabled fully, we will apply the code it guards
- # to both Projects::UploadsController as well as
- # Groups::UploadsController.
- #
- # https://gitlab.com/gitlab-org/gitlab/-/issues/352291
- #
- if model.instance_of?(Group)
- expect(response).to have_gitlab_http_status(:ok)
- else
- expect(response).to have_gitlab_http_status(:redirect)
- end
- end
- end
-
- context "with flag disabled" do
- before do
- stub_feature_flags(enforce_auth_checks_on_uploads: false)
- end
-
- it "responds with status 200" do
- show_upload
-
- expect(response).to have_gitlab_http_status(:ok)
- end
+ if model.instance_of?(Group)
+ expect(response).to have_gitlab_http_status(:ok)
+ else
+ expect(response).to have_gitlab_http_status(:redirect)
end
end
end
@@ -308,41 +280,13 @@ RSpec.shared_examples 'handle uploads' do
allow_any_instance_of(FileUploader).to receive(:image?).and_return(true)
end
- context "enforce_auth_checks_on_uploads feature flag" do
- context "with flag enabled" do
- before do
- stub_feature_flags(enforce_auth_checks_on_uploads: true)
- end
-
- it "responds with status 404" do
- show_upload
-
- # We're switching here based on the class due to the feature
- # flag :enforce_auth_checks_on_uploads switching on
- # project. When it is enabled fully, we will apply the
- # code it guards to both Projects::UploadsController as
- # well as Groups::UploadsController.
- #
- # https://gitlab.com/gitlab-org/gitlab/-/issues/352291
- #
- if model.instance_of?(Group)
- expect(response).to have_gitlab_http_status(:ok)
- else
- expect(response).to have_gitlab_http_status(:not_found)
- end
- end
- end
-
- context "with flag disabled" do
- before do
- stub_feature_flags(enforce_auth_checks_on_uploads: false)
- end
-
- it "responds with status 200" do
- show_upload
+ it "responds with the appropriate status code" do
+ show_upload
- expect(response).to have_gitlab_http_status(:ok)
- end
+ if model.instance_of?(Group)
+ expect(response).to have_gitlab_http_status(:ok)
+ else
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
end
diff --git a/spec/support/shared_examples/features/access_tokens_shared_examples.rb b/spec/support/shared_examples/features/access_tokens_shared_examples.rb
index c162ed36881..0fc45b154d8 100644
--- a/spec/support/shared_examples/features/access_tokens_shared_examples.rb
+++ b/spec/support/shared_examples/features/access_tokens_shared_examples.rb
@@ -38,7 +38,7 @@ RSpec.shared_examples 'resource access tokens creation' do |resource_type|
expect(active_resource_access_tokens).to have_text('in')
expect(active_resource_access_tokens).to have_text('read_api')
expect(active_resource_access_tokens).to have_text('read_repository')
- expect(active_resource_access_tokens).to have_text('Maintainer')
+ expect(active_resource_access_tokens).to have_text('Guest')
expect(created_resource_access_token).not_to be_empty
end
end
diff --git a/spec/support/shared_examples/features/content_editor_shared_examples.rb b/spec/support/shared_examples/features/content_editor_shared_examples.rb
index 0ea82f37db0..3fa7beea97e 100644
--- a/spec/support/shared_examples/features/content_editor_shared_examples.rb
+++ b/spec/support/shared_examples/features/content_editor_shared_examples.rb
@@ -13,9 +13,8 @@ RSpec.shared_examples 'edits content using the content editor' do
expect(page).to have_css('[data-testid="formatting-bubble-menu"]')
end
- it 'does not show a formatting bubble menu for code' do
- find(content_editor_testid).send_keys 'This is a `code`'
- find(content_editor_testid).send_keys [:shift, :left]
+ it 'does not show a formatting bubble menu for code blocks' do
+ find(content_editor_testid).send_keys '```js '
expect(page).not_to have_css('[data-testid="formatting-bubble-menu"]')
end
diff --git a/spec/support/shared_examples/features/inviting_members_shared_examples.rb b/spec/support/shared_examples/features/inviting_members_shared_examples.rb
index bca0e02fcdd..277ec6a7fa7 100644
--- a/spec/support/shared_examples/features/inviting_members_shared_examples.rb
+++ b/spec/support/shared_examples/features/inviting_members_shared_examples.rb
@@ -147,9 +147,9 @@ RSpec.shared_examples 'inviting members' do |snowplow_invite_label|
invite_member(user2.name, role: role, refresh: false)
- expect(page).to have_selector(invite_modal_selector)
- expect(page).to have_content "#{user2.name}: Access level should be greater than or equal to Developer " \
- "inherited membership from group #{group.name}"
+ invite_modal = page.find(invite_modal_selector)
+ expect(invite_modal).to have_content "#{user2.name}: Access level should be greater than or equal to " \
+ "Developer inherited membership from group #{group.name}"
page.refresh
@@ -166,31 +166,85 @@ RSpec.shared_examples 'inviting members' do |snowplow_invite_label|
group.add_maintainer(user3)
end
- it 'shows the user errors and then removes them from the form', :js do
- visit subentity_members_page_path
+ it 'shows the partial user error and success and then removes them from the form', :js do
+ user4 = create(:user)
+ user5 = create(:user)
+ user6 = create(:user)
+ user7 = create(:user)
+
+ group.add_maintainer(user6)
+ group.add_maintainer(user7)
- invite_member([user2.name, user3.name], role: role, refresh: false)
+ visit subentity_members_page_path
- expect(page).to have_selector(invite_modal_selector)
- expect(page).to have_selector(member_token_error_selector(user2.id))
- expect(page).to have_selector(member_token_error_selector(user3.id))
- expect(page).to have_text("The following 2 members couldn't be invited")
- expect(page).to have_text("#{user2.name}: Access level should be greater than or equal to")
- expect(page).to have_text("#{user3.name}: Access level should be greater than or equal to")
+ invite_member([user2.name, user3.name, user4.name, user6.name, user7.name], role: role, refresh: false)
+
+ # we have more than 2 errors, so the extra errors are collapsed behind the "Show more" button
+ invite_modal = page.find(invite_modal_selector)
+ expect(invite_modal).to have_text("The following 4 members couldn't be invited")
+ expect(invite_modal).to have_selector(limited_invite_error_selector, count: 2, visible: :visible)
+ expect(invite_modal).to have_selector(expanded_invite_error_selector, count: 2, visible: :hidden)
+ # the return order is unpredictable, so we can't rely on the error messages appearing
+ # in any particular order and therefore don't assert on the message text
+ expect_to_have_invalid_invite_indicator(invite_modal, user2, message: false)
+ expect_to_have_invalid_invite_indicator(invite_modal, user3, message: false)
+ expect_to_have_invalid_invite_indicator(invite_modal, user6, message: false)
+ expect_to_have_invalid_invite_indicator(invite_modal, user7, message: false)
+ expect_to_have_successful_invite_indicator(invite_modal, user4)
+ expect(invite_modal).to have_button('Show more (2)')
+
+ # now we want to test the show more errors count logic
+ remove_token(user7.id)
+
+ # the error count decreases from 4 to 3 and the hidden count from 2 to 1
+ expect(invite_modal).to have_text("The following 3 members couldn't be invited")
+ expect(invite_modal).to have_button('Show more (1)')
+
+ # expand the collapsed errors so the remaining hidden error (for user6) is shown
+ invite_modal.find(more_invite_errors_button_selector).click
+
+ # now we should see the errors for all invalid users and the collapse button text
+ expect(invite_modal).to have_selector(limited_invite_error_selector, count: 2, visible: :visible)
+ expect(invite_modal).to have_selector(expanded_invite_error_selector, count: 1, visible: :visible)
+ expect_to_have_invalid_invite_indicator(invite_modal, user2, message: true)
+ expect_to_have_invalid_invite_indicator(invite_modal, user3, message: true)
+ expect_to_have_invalid_invite_indicator(invite_modal, user6, message: true)
+ expect(invite_modal).to have_button('Show less')
+
+ # adds new token, but doesn't submit
+ select_members(user5.name)
+
+ expect_to_have_normal_invite_indicator(invite_modal, user5)
remove_token(user2.id)
- expect(page).not_to have_selector(member_token_error_selector(user2.id))
- expect(page).to have_selector(member_token_error_selector(user3.id))
- expect(page).to have_text("The following member couldn't be invited")
- expect(page).not_to have_text("#{user2.name}: Access level should be greater than or equal to")
+ expect(invite_modal).to have_text("The following 2 members couldn't be invited")
+ expect(invite_modal).not_to have_selector(more_invite_errors_button_selector)
+ expect_to_have_invite_removed(invite_modal, user2)
+ expect_to_have_invalid_invite_indicator(invite_modal, user3)
+ expect_to_have_invalid_invite_indicator(invite_modal, user6)
+ expect_to_have_successful_invite_indicator(invite_modal, user4)
+ expect_to_have_normal_invite_indicator(invite_modal, user5)
+
+ remove_token(user6.id)
+
+ expect(invite_modal).to have_text("The following member couldn't be invited")
+ expect_to_have_invite_removed(invite_modal, user6)
+ expect_to_have_invalid_invite_indicator(invite_modal, user3)
+ expect_to_have_successful_invite_indicator(invite_modal, user4)
+ expect_to_have_normal_invite_indicator(invite_modal, user5)
remove_token(user3.id)
- expect(page).not_to have_selector(member_token_error_selector(user3.id))
- expect(page).not_to have_text("The following member couldn't be invited")
- expect(page).not_to have_text("Review the invite errors and try again")
- expect(page).not_to have_text("#{user3.name}: Access level should be greater than or equal to")
+ expect(invite_modal).not_to have_text("The following member couldn't be invited")
+ expect(invite_modal).not_to have_text("Review the invite errors and try again")
+ expect_to_have_invite_removed(invite_modal, user3)
+ expect_to_have_successful_invite_indicator(invite_modal, user4)
+ expect_to_have_normal_invite_indicator(invite_modal, user5)
+
+ submit_invites
+
+ expect(page).not_to have_selector(invite_modal_selector)
page.refresh
@@ -203,6 +257,10 @@ RSpec.shared_examples 'inviting members' do |snowplow_invite_label|
expect(page).to have_content('Maintainer')
expect(page).not_to have_button('Maintainer')
end
+
+ page.within find_invited_member_row(user4.name) do
+ expect(page).to have_button(role)
+ end
end
it 'only shows the error for an invalid formatted email and does not display other member errors', :js do
@@ -210,12 +268,12 @@ RSpec.shared_examples 'inviting members' do |snowplow_invite_label|
invite_member([user2.name, user3.name, 'bad@email'], role: role, refresh: false)
- expect(page).to have_selector(invite_modal_selector)
- expect(page).to have_text('email contains an invalid email address')
- expect(page).not_to have_text("The following 2 members couldn't be invited")
- expect(page).not_to have_text("Review the invite errors and try again")
- expect(page).not_to have_text("#{user2.name}: Access level should be greater than or equal to")
- expect(page).not_to have_text("#{user3.name}: Access level should be greater than or equal to")
+ invite_modal = page.find(invite_modal_selector)
+ expect(invite_modal).to have_text('email contains an invalid email address')
+ expect(invite_modal).not_to have_text("The following 2 members couldn't be invited")
+ expect(invite_modal).not_to have_text("Review the invite errors and try again")
+ expect(invite_modal).not_to have_text("#{user2.name}: Access level should be greater than or equal to")
+ expect(invite_modal).not_to have_text("#{user3.name}: Access level should be greater than or equal to")
end
end
end
diff --git a/spec/support/shared_examples/features/multiple_assignees_widget_mr_shared_examples.rb b/spec/support/shared_examples/features/multiple_assignees_widget_mr_shared_examples.rb
index bbde448a1a1..ef2683d6424 100644
--- a/spec/support/shared_examples/features/multiple_assignees_widget_mr_shared_examples.rb
+++ b/spec/support/shared_examples/features/multiple_assignees_widget_mr_shared_examples.rb
@@ -32,7 +32,7 @@ RSpec.shared_examples 'multiple assignees widget merge request' do |action, save
end
page.within '.dropdown-menu-user' do
- click_link user.name
+ click_button user.name
end
page.within '.issuable-sidebar' do
diff --git a/spec/support/shared_examples/features/sidebar/sidebar_due_date_shared_examples.rb b/spec/support/shared_examples/features/sidebar/sidebar_due_date_shared_examples.rb
index 345dfbce423..95c0a76d726 100644
--- a/spec/support/shared_examples/features/sidebar/sidebar_due_date_shared_examples.rb
+++ b/spec/support/shared_examples/features/sidebar/sidebar_due_date_shared_examples.rb
@@ -16,7 +16,9 @@ RSpec.shared_examples 'date sidebar widget' do
page.within('[data-testid="sidebar-due-date"]') do
today = Date.today.day
- click_button 'Edit'
+ button = find_button('Edit')
+ scroll_to(button)
+ button.click
click_button today.to_s
diff --git a/spec/support/shared_examples/features/trial_email_validation_shared_example.rb b/spec/support/shared_examples/features/trial_email_validation_shared_example.rb
new file mode 100644
index 00000000000..8304a91af86
--- /dev/null
+++ b/spec/support/shared_examples/features/trial_email_validation_shared_example.rb
@@ -0,0 +1,59 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'user email validation' do
+ let(:email_hint_message) { 'We recommend a work email address.' }
+ let(:email_error_message) { 'Please provide a valid email address.' }
+
+ let(:email_warning_message) do
+ 'This email address does not look right, are you sure you typed it correctly?'
+ end
+
+ context 'with trial_email_validation flag enabled' do
+ it 'shows an error message until a correct email is entered' do
+ visit path
+ expect(page).to have_content(email_hint_message)
+ expect(page).not_to have_content(email_error_message)
+ expect(page).not_to have_content(email_warning_message)
+
+ fill_in 'new_user_email', with: 'foo@'
+ fill_in 'new_user_first_name', with: ''
+
+ expect(page).not_to have_content(email_hint_message)
+ expect(page).to have_content(email_error_message)
+ expect(page).not_to have_content(email_warning_message)
+
+ fill_in 'new_user_email', with: 'foo@bar'
+ fill_in 'new_user_first_name', with: ''
+
+ expect(page).not_to have_content(email_hint_message)
+ expect(page).not_to have_content(email_error_message)
+ expect(page).to have_content(email_warning_message)
+
+ fill_in 'new_user_email', with: 'foo@gitlab.com'
+ fill_in 'new_user_first_name', with: ''
+
+ expect(page).not_to have_content(email_hint_message)
+ expect(page).not_to have_content(email_error_message)
+ expect(page).not_to have_content(email_warning_message)
+ end
+ end
+
+ context 'when trial_email_validation flag disabled' do
+ before do
+ stub_feature_flags trial_email_validation: false
+ end
+
+ it 'does not show an error message' do
+ visit path
+ expect(page).to have_content(email_hint_message)
+ expect(page).not_to have_content(email_error_message)
+ expect(page).not_to have_content(email_warning_message)
+
+ fill_in 'new_user_email', with: 'foo@'
+
+ expect(page).to have_content(email_hint_message)
+ expect(page).not_to have_content(email_error_message)
+ expect(page).not_to have_content(email_warning_message)
+ end
+ end
+end
diff --git a/spec/support/shared_examples/features/user_views_tag_shared_examples.rb b/spec/support/shared_examples/features/user_views_tag_shared_examples.rb
new file mode 100644
index 00000000000..989de1dbfbb
--- /dev/null
+++ b/spec/support/shared_examples/features/user_views_tag_shared_examples.rb
@@ -0,0 +1,34 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'user views tag' do
+ context 'when user views with the tag' do
+ let(:project) { create(:project, :repository) }
+ let(:user) { create(:user) }
+ let(:tag_name) { "stable" }
+ let!(:release) { create(:release, project: project, tag: tag_name, name: "ReleaseName") }
+
+ before do
+ project.add_developer(user)
+ project.repository.add_tag(user, tag_name, project.default_branch_or_main)
+
+ sign_in(user)
+ end
+
+ shared_examples 'shows tag' do
+ it do
+ visit tag_page
+
+ expect(page).to have_content tag_name
+ expect(page).to have_link("ReleaseName", href: project_release_path(project, release))
+ end
+ end
+
+ it_behaves_like 'shows tag'
+
+ context 'when tag name contains a slash' do
+ let(:tag_name) { "stable/v0.1" }
+
+ it_behaves_like 'shows tag'
+ end
+ end
+end
diff --git a/spec/support/shared_examples/features/variable_list_shared_examples.rb b/spec/support/shared_examples/features/variable_list_shared_examples.rb
index c63faace6b2..9d81c0e9a3e 100644
--- a/spec/support/shared_examples/features/variable_list_shared_examples.rb
+++ b/spec/support/shared_examples/features/variable_list_shared_examples.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-RSpec.shared_examples 'variable list' do
+RSpec.shared_examples 'variable list' do |is_admin|
it 'shows a list of variables' do
page.within('[data-testid="ci-variable-table"]') do
expect(find('.js-ci-variable-row:nth-child(1) td[data-label="Key"]').text).to eq(variable.key)
@@ -166,7 +166,7 @@ RSpec.shared_examples 'variable list' do
wait_for_requests
expect(find('.flash-container')).to be_present
- expect(find('[data-testid="alert-danger"]').text).to have_content('Variables key (key) has already been taken')
+ expect(find('[data-testid="alert-danger"]').text).to have_content('(key) has already been taken')
end
it 'prevents a variable to be added if no values are provided when a variable is set to masked' do
@@ -257,7 +257,11 @@ RSpec.shared_examples 'variable list' do
end
it 'shows a message regarding the changed default' do
- expect(page).to have_content 'Environment variables are configured by your administrator to be protected by default'
+ if is_admin
+ expect(page).to have_content 'Environment variables on this GitLab instance are configured to be protected by default'
+ else
+ expect(page).to have_content 'Environment variables are configured by your administrator to be protected by default'
+ end
end
end
diff --git a/spec/support/shared_examples/features/wiki/file_attachments_shared_examples.rb b/spec/support/shared_examples/features/wiki/file_attachments_shared_examples.rb
index 0ef1ccdfe57..8d1502bed84 100644
--- a/spec/support/shared_examples/features/wiki/file_attachments_shared_examples.rb
+++ b/spec/support/shared_examples/features/wiki/file_attachments_shared_examples.rb
@@ -12,8 +12,8 @@ RSpec.shared_examples 'wiki file attachments' do
end
context 'before uploading' do
- it 'shows "Attach a file" button' do
- expect(page).to have_button('Attach a file')
+ it 'shows "Attach a file or image" button' do
+ expect(page).to have_selector('[data-testid="button-attach-file"]')
expect(page).not_to have_selector('.uploading-progress-container', visible: true)
end
end
@@ -26,7 +26,7 @@ RSpec.shared_examples 'wiki file attachments' do
click_button 'Cancel'
end
- expect(page).to have_button('Attach a file')
+ expect(page).to have_selector('[data-testid="button-attach-file"]')
expect(page).not_to have_button('Cancel')
expect(page).not_to have_selector('.uploading-progress-container', visible: true)
end
@@ -41,11 +41,11 @@ RSpec.shared_examples 'wiki file attachments' do
end
context 'uploading is complete' do
- it 'shows "Attach a file" button on uploading complete' do
+ it 'shows "Attach a file or image" button on uploading complete' do
attach_with_dropzone
wait_for_requests
- expect(page).to have_button('Attach a file')
+ expect(page).to have_selector('[data-testid="button-attach-file"]')
expect(page).not_to have_selector('.uploading-progress-container', visible: true)
end
diff --git a/spec/support/shared_examples/features/wiki/user_updates_wiki_page_shared_examples.rb b/spec/support/shared_examples/features/wiki/user_updates_wiki_page_shared_examples.rb
index 79c7c1891ac..87067336a36 100644
--- a/spec/support/shared_examples/features/wiki/user_updates_wiki_page_shared_examples.rb
+++ b/spec/support/shared_examples/features/wiki/user_updates_wiki_page_shared_examples.rb
@@ -140,7 +140,7 @@ RSpec.shared_examples 'User updates wiki page' do
context 'when using the content editor' do
context 'with feature flag on' do
before do
- click_button 'Edit rich text'
+ find('[data-testid="toggle-editing-mode-button"] label', text: 'Rich text').click
end
it_behaves_like 'edits content using the content editor'
diff --git a/spec/support/shared_examples/graphql/mutations/timelogs/create_shared_examples.rb b/spec/support/shared_examples/graphql/mutations/timelogs/create_shared_examples.rb
new file mode 100644
index 00000000000..f28348fb945
--- /dev/null
+++ b/spec/support/shared_examples/graphql/mutations/timelogs/create_shared_examples.rb
@@ -0,0 +1,97 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'issuable supports timelog creation mutation' do
+ context 'when the user is anonymous' do
+ before do
+ post_graphql_mutation(mutation, current_user: current_user)
+ end
+
+ it_behaves_like 'a mutation that returns a top-level access error'
+ end
+
+ context 'when the user is a guest member of the namespace' do
+ let(:current_user) { create(:user) }
+
+ before do
+ users_container.add_guest(current_user)
+
+ post_graphql_mutation(mutation, current_user: current_user)
+ end
+
+ it_behaves_like 'a mutation that returns a top-level access error'
+ end
+
+ context 'when user has permissions to create a timelog' do
+ let(:current_user) { author }
+
+ before do
+ users_container.add_reporter(current_user)
+ end
+
+ context 'with valid data' do
+ it 'creates the timelog' do
+ expect do
+ post_graphql_mutation(mutation, current_user: current_user)
+ end.to change(Timelog, :count).by(1)
+
+ expect(response).to have_gitlab_http_status(:success)
+ expect(mutation_response['errors']).to be_empty
+ expect(mutation_response['timelog']).to include(
+ 'timeSpent' => 3600,
+ 'spentAt' => '2022-07-08T00:00:00Z',
+ 'summary' => 'Test summary'
+ )
+ end
+ end
+
+ context 'with invalid time_spent' do
+ let(:time_spent) { '3h e' }
+
+ it 'returns an error' do
+ expect do
+ post_graphql_mutation(mutation, current_user: current_user)
+ end.to change(Timelog, :count).by(0)
+
+ expect(response).to have_gitlab_http_status(:success)
+ expect(mutation_response['errors']).to match_array(['Time spent can\'t be blank'])
+ expect(mutation_response['timelog']).to be_nil
+ end
+ end
+ end
+end
+
+RSpec.shared_examples 'issuable does not support timelog creation mutation' do
+ context 'when the user is anonymous' do
+ before do
+ post_graphql_mutation(mutation, current_user: current_user)
+ end
+
+ it_behaves_like 'a mutation that returns a top-level access error'
+ end
+
+ context 'when the user is a guest member of the namespace' do
+ let(:current_user) { create(:user) }
+
+ before do
+ users_container.add_guest(current_user)
+
+ post_graphql_mutation(mutation, current_user: current_user)
+ end
+
+ it_behaves_like 'a mutation that returns top-level errors' do
+ let(:match_errors) { contain_exactly(include('is not a valid ID for')) }
+ end
+ end
+
+ context 'when user has permissions to create a timelog' do
+ let(:current_user) { author }
+
+ before do
+ users_container.add_reporter(current_user)
+ end
+
+ it_behaves_like 'a mutation that returns top-level errors' do
+ let(:match_errors) { contain_exactly(include('is not a valid ID for')) }
+ end
+ end
+end
diff --git a/spec/support/shared_examples/graphql/mutations/work_items/update_weight_widget_shared_examples.rb b/spec/support/shared_examples/graphql/mutations/work_items/update_weight_widget_shared_examples.rb
deleted file mode 100644
index 3c32b7e0310..00000000000
--- a/spec/support/shared_examples/graphql/mutations/work_items/update_weight_widget_shared_examples.rb
+++ /dev/null
@@ -1,34 +0,0 @@
-# frozen_string_literal: true
-
-RSpec.shared_examples 'update work item weight widget' do
- it 'updates the weight widget' do
- expect do
- post_graphql_mutation(mutation, current_user: current_user)
- work_item.reload
- end.to change(work_item, :weight).from(nil).to(new_weight)
-
- expect(response).to have_gitlab_http_status(:success)
- expect(mutation_response['workItem']['widgets']).to include(
- {
- 'weight' => new_weight,
- 'type' => 'WEIGHT'
- }
- )
- end
-
- context 'when the updated work item is not valid' do
- it 'returns validation errors without the work item' do
- errors = ActiveModel::Errors.new(work_item).tap { |e| e.add(:weight, 'error message') }
-
- allow_next_found_instance_of(::WorkItem) do |instance|
- allow(instance).to receive(:valid?).and_return(false)
- allow(instance).to receive(:errors).and_return(errors)
- end
-
- post_graphql_mutation(mutation, current_user: current_user)
-
- expect(mutation_response['workItem']).to be_nil
- expect(mutation_response['errors']).to match_array(['Weight error message'])
- end
- end
-end
diff --git a/spec/support/shared_examples/graphql/notes_creation_shared_examples.rb b/spec/support/shared_examples/graphql/notes_creation_shared_examples.rb
index 2c6118779e6..0aa3bf8944f 100644
--- a/spec/support/shared_examples/graphql/notes_creation_shared_examples.rb
+++ b/spec/support/shared_examples/graphql/notes_creation_shared_examples.rb
@@ -94,5 +94,6 @@ RSpec.shared_examples 'a Note mutation with confidential notes' do
expect(mutation_response).to have_key('note')
expect(mutation_response['note']['confidential']).to eq(true)
+ expect(mutation_response['note']['internal']).to eq(true)
end
end
diff --git a/spec/support/shared_examples/graphql/types/gitlab_style_deprecations_shared_examples.rb b/spec/support/shared_examples/graphql/types/gitlab_style_deprecations_shared_examples.rb
index 7fd54408b11..2d7da9f9f00 100644
--- a/spec/support/shared_examples/graphql/types/gitlab_style_deprecations_shared_examples.rb
+++ b/spec/support/shared_examples/graphql/types/gitlab_style_deprecations_shared_examples.rb
@@ -69,4 +69,21 @@ RSpec.shared_examples 'Gitlab-style deprecations' do
'This feature is in Alpha. It can be changed or removed at any time. Introduced in 1.10.'
)
end
+
+ it 'supports :alpha' do
+ deprecable = subject(alpha: { milestone: '1.10' })
+
+ expect(deprecable.deprecation_reason).to eq(
+ 'This feature is in Alpha. It can be changed or removed at any time. Introduced in 1.10.'
+ )
+ end
+
+ it 'does not allow :alpha and :deprecated together' do
+ expect do
+ subject(alpha: { milestone: '1.10' }, deprecated: { milestone: '1.10', reason: 'my reason' })
+ end.to raise_error(
+ ArgumentError,
+ eq("`alpha` and `deprecated` arguments cannot be passed at the same time")
+ )
+ end
end
diff --git a/spec/support/shared_examples/helpers/wiki_helpers_shared_examples.rb b/spec/support/shared_examples/helpers/wiki_helpers_shared_examples.rb
index c2c27fb65ca..61c8a3f47df 100644
--- a/spec/support/shared_examples/helpers/wiki_helpers_shared_examples.rb
+++ b/spec/support/shared_examples/helpers/wiki_helpers_shared_examples.rb
@@ -2,7 +2,7 @@
RSpec.shared_examples 'wiki endpoint helpers' do
let(:resource_path) { page.wiki.container.class.to_s.pluralize.downcase }
- let(:url) { "/api/v4/#{resource_path}/#{page.wiki.container.id}/wikis/#{page.slug}?version=#{page.version.id}"}
+ let(:url) { "/api/v4/#{resource_path}/#{page.wiki.container.id}/wikis/#{page.slug}?version=#{page.version.id}" }
it 'returns the full endpoint url' do
expect(helper.wiki_page_render_api_endpoint(page)).to end_with(url)
diff --git a/spec/support/shared_examples/lib/gitlab/config/inheritable_shared_examples.rb b/spec/support/shared_examples/lib/gitlab/config/inheritable_shared_examples.rb
index 95772b1774a..5eae8777a20 100644
--- a/spec/support/shared_examples/lib/gitlab/config/inheritable_shared_examples.rb
+++ b/spec/support/shared_examples/lib/gitlab/config/inheritable_shared_examples.rb
@@ -86,7 +86,10 @@ RSpec.shared_examples 'with inheritable CI config' do
expect do
# we ignore exceptions as `#overwrite_entry`
# can raise exception on duplicates
- entry.send(:inherit!, deps) rescue described_class::InheritError
+
+ entry.send(:inherit!, deps)
+ rescue described_class::InheritError
+ nil
end.not_to change { entry[entry_key] }
end
end
diff --git a/spec/support/shared_examples/lib/gitlab/usage_data_counters/issuable_activity_shared_examples.rb b/spec/support/shared_examples/lib/gitlab/usage_data_counters/issuable_activity_shared_examples.rb
index 9d280d9404a..481e11bcf0e 100644
--- a/spec/support/shared_examples/lib/gitlab/usage_data_counters/issuable_activity_shared_examples.rb
+++ b/spec/support/shared_examples/lib/gitlab/usage_data_counters/issuable_activity_shared_examples.rb
@@ -33,7 +33,7 @@ RSpec.shared_examples 'does not track when feature flag is disabled' do |feature
end
end
-RSpec.shared_examples 'a daily tracked issuable snowplow and service ping events' do
+RSpec.shared_examples 'a daily tracked issuable snowplow and service ping events for given event params' do
before do
stub_application_setting(usage_ping_enabled: true)
end
@@ -44,22 +44,21 @@ RSpec.shared_examples 'a daily tracked issuable snowplow and service ping events
specify do
aggregate_failures do
- expect(track_action(author: user1, project: project)).to be_truthy
- expect(track_action(author: user1, project: project)).to be_truthy
- expect(track_action(author: user2, project: project)).to be_truthy
+ expect(track_action({ author: user1 }.merge(track_params))).to be_truthy
+ expect(track_action({ author: user1 }.merge(track_params))).to be_truthy
+ expect(track_action({ author: user2 }.merge(track_params))).to be_truthy
expect(count_unique).to eq(2)
end
end
it 'does not track edit actions if author is not present' do
- expect(track_action(author: nil, project: project)).to be_nil
+ expect(track_action({ author: nil }.merge(track_params))).to be_nil
end
it 'emits snowplow event' do
- track_action(author: user1, project: project)
+ track_action({ author: user1 }.merge(track_params))
- expect_snowplow_event(category: 'issues_edit', action: action, user: user1,
- namespace: project.namespace, project: project)
+ expect_snowplow_event(**{ category: category, action: event_action, user: user1 }.merge(event_params))
end
context 'with route_hll_to_snowplow_phase2 disabled' do
@@ -68,9 +67,33 @@ RSpec.shared_examples 'a daily tracked issuable snowplow and service ping events
end
it 'does not emit snowplow event' do
- track_action(author: user1, project: project)
+ track_action({ author: user1 }.merge(track_params))
expect_no_snowplow_event
end
end
end
+
+RSpec.shared_examples 'daily tracked issuable snowplow and service ping events with project' do
+ it_behaves_like 'a daily tracked issuable snowplow and service ping events for given event params' do
+ let(:track_params) { { project: project } }
+ let(:event_params) { track_params.merge(label: event_label, property: event_property, namespace: project.namespace) }
+ end
+end
+
+RSpec.shared_examples 'a daily tracked issuable snowplow and service ping events with namespace' do
+ it_behaves_like 'a daily tracked issuable snowplow and service ping events for given event params' do
+ let(:track_params) { { namespace: namespace } }
+ let(:event_params) { track_params.merge(label: event_label, property: event_property) }
+ end
+end
+
+RSpec.shared_examples 'does not track with namespace when feature flag is disabled' do |feature_flag|
+ context "when feature flag #{feature_flag} is disabled" do
+ it 'does not track action' do
+ stub_feature_flags(feature_flag => false)
+
+ expect(track_action(author: user1, namespace: namespace)).to be_nil
+ end
+ end
+end
diff --git a/spec/support/shared_examples/models/chat_integration_shared_examples.rb b/spec/support/shared_examples/models/chat_integration_shared_examples.rb
index d189e91effd..fb08784f34f 100644
--- a/spec/support/shared_examples/models/chat_integration_shared_examples.rb
+++ b/spec/support/shared_examples/models/chat_integration_shared_examples.rb
@@ -3,7 +3,6 @@
RSpec.shared_examples "chat integration" do |integration_name|
describe "Associations" do
it { is_expected.to belong_to :project }
- it { is_expected.to have_one :service_hook }
end
describe "Validations" do
@@ -13,6 +12,7 @@ RSpec.shared_examples "chat integration" do |integration_name|
end
it { is_expected.to validate_presence_of(:webhook) }
+
it_behaves_like "issue tracker integration URL attribute", :webhook
end
diff --git a/spec/support/shared_examples/models/ci/metadata_id_tokens_shared_examples.rb b/spec/support/shared_examples/models/ci/metadata_id_tokens_shared_examples.rb
new file mode 100644
index 00000000000..0c71ebe7a4d
--- /dev/null
+++ b/spec/support/shared_examples/models/ci/metadata_id_tokens_shared_examples.rb
@@ -0,0 +1,44 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples_for 'has ID tokens' do |ci_type|
+ subject(:ci) { FactoryBot.build(ci_type) }
+
+ describe 'delegations' do
+ it { is_expected.to delegate_method(:id_tokens).to(:metadata).allow_nil }
+ end
+
+ describe '#id_tokens?' do
+ subject { ci.id_tokens? }
+
+ context 'without metadata' do
+ let(:ci) { FactoryBot.build(ci_type) }
+
+ it { is_expected.to be_falsy }
+ end
+
+ context 'with metadata' do
+ let(:ci) { FactoryBot.build(ci_type, metadata: FactoryBot.build(:ci_build_metadata, id_tokens: id_tokens)) }
+
+ context 'when ID tokens exist' do
+ let(:id_tokens) { { TEST_JOB_JWT: { id_token: { aud: 'developers ' } } } }
+
+ it { is_expected.to be_truthy }
+ end
+
+ context 'when ID tokens do not exist' do
+ let(:id_tokens) { {} }
+
+ it { is_expected.to be_falsy }
+ end
+ end
+ end
+
+ describe '#id_tokens=' do
+ it 'assigns the ID tokens to the CI job' do
+ id_tokens = [{ 'JOB_ID_TOKEN' => { 'id_token' => { 'aud' => 'https://gitlab.test ' } } }]
+ ci.id_tokens = id_tokens
+
+ expect(ci.id_tokens).to match_array(id_tokens)
+ end
+ end
+end
diff --git a/spec/support/shared_examples/models/concerns/counter_attribute_shared_examples.rb b/spec/support/shared_examples/models/concerns/counter_attribute_shared_examples.rb
index f92ed3d7396..f4d5ab3d5c6 100644
--- a/spec/support/shared_examples/models/concerns/counter_attribute_shared_examples.rb
+++ b/spec/support/shared_examples/models/concerns/counter_attribute_shared_examples.rb
@@ -2,6 +2,10 @@
require 'spec_helper'
RSpec.shared_examples_for CounterAttribute do |counter_attributes|
+ before do
+ Gitlab::ApplicationContext.push(feature_category: 'test', caller_id: 'caller')
+ end
+
it 'defines a Redis counter_key' do
expect(model.counter_key(:counter_name))
.to eq("project:{#{model.project_id}}:counters:CounterAttributeModel:#{model.id}:counter_name")
@@ -22,7 +26,21 @@ RSpec.shared_examples_for CounterAttribute do |counter_attributes|
where(:increment) { [10, -3] }
with_them do
- it 'increments the counter in Redis' do
+ it 'increments the counter in Redis and logs it' do
+ expect(Gitlab::AppLogger).to receive(:info).with(
+ hash_including(
+ message: 'Increment counter attribute',
+ attribute: attribute,
+ project_id: model.project_id,
+ increment: increment,
+ new_counter_value: 0 + increment,
+ current_db_value: model.read_attribute(attribute),
+ 'correlation_id' => an_instance_of(String),
+ 'meta.feature_category' => 'test',
+ 'meta.caller_id' => 'caller'
+ )
+ )
+
subject
Gitlab::Redis::SharedState.with do |redis|
@@ -86,7 +104,21 @@ RSpec.shared_examples_for CounterAttribute do |counter_attributes|
model.delayed_increment_counter(incremented_attribute, -3)
end
- it 'updates the record' do
+ it 'updates the record and logs it' do
+ expect(Gitlab::AppLogger).to receive(:info).with(
+ hash_including(
+ message: 'Flush counter attribute to database',
+ attribute: incremented_attribute,
+ project_id: model.project_id,
+ increment: 7,
+ previous_db_value: 0,
+ new_db_value: 7,
+ 'correlation_id' => an_instance_of(String),
+ 'meta.feature_category' => 'test',
+ 'meta.caller_id' => 'caller'
+ )
+ )
+
expect { subject }.to change { model.reset.read_attribute(incremented_attribute) }.by(7)
end
@@ -153,4 +185,32 @@ RSpec.shared_examples_for CounterAttribute do |counter_attributes|
end
end
end
+
+ describe '#clear_counter!' do
+ let(:attribute) { counter_attributes.first }
+
+ before do
+ model.increment_counter(attribute, 10)
+ end
+
+ it 'deletes the counter key for the given attribute and logs it' do
+ expect(Gitlab::AppLogger).to receive(:info).with(
+ hash_including(
+ message: 'Clear counter attribute',
+ attribute: attribute,
+ project_id: model.project_id,
+ 'correlation_id' => an_instance_of(String),
+ 'meta.feature_category' => 'test',
+ 'meta.caller_id' => 'caller'
+ )
+ )
+
+ model.clear_counter!(attribute)
+
+ Gitlab::Redis::SharedState.with do |redis|
+ key_exists = redis.exists(model.counter_key(attribute))
+ expect(key_exists).to be_falsey
+ end
+ end
+ end
end
diff --git a/spec/support/shared_examples/models/concerns/integrations/slack_mattermost_notifier_shared_examples.rb b/spec/support/shared_examples/models/concerns/integrations/slack_mattermost_notifier_shared_examples.rb
index d80be5be3b3..7512a9f2855 100644
--- a/spec/support/shared_examples/models/concerns/integrations/slack_mattermost_notifier_shared_examples.rb
+++ b/spec/support/shared_examples/models/concerns/integrations/slack_mattermost_notifier_shared_examples.rb
@@ -13,7 +13,6 @@ RSpec.shared_examples Integrations::SlackMattermostNotifier do |integration_name
describe "Associations" do
it { is_expected.to belong_to :project }
- it { is_expected.to have_one :service_hook }
end
describe 'Validations' do
@@ -23,6 +22,7 @@ RSpec.shared_examples Integrations::SlackMattermostNotifier do |integration_name
end
it { is_expected.to validate_presence_of(:webhook) }
+
it_behaves_like 'issue tracker integration URL attribute', :webhook
end
diff --git a/spec/support/shared_examples/models/integrations/has_web_hook_shared_examples.rb b/spec/support/shared_examples/models/integrations/has_web_hook_shared_examples.rb
index ae72cb6ec5d..2f693edeb53 100644
--- a/spec/support/shared_examples/models/integrations/has_web_hook_shared_examples.rb
+++ b/spec/support/shared_examples/models/integrations/has_web_hook_shared_examples.rb
@@ -3,6 +3,10 @@
RSpec.shared_examples Integrations::HasWebHook do
include AfterNextHelpers
+ describe 'associations' do
+ it { is_expected.to have_one(:service_hook).inverse_of(:integration).with_foreign_key(:service_id) }
+ end
+
describe 'callbacks' do
it 'calls #update_web_hook! when enabled' do
expect(integration).to receive(:update_web_hook!)
diff --git a/spec/support/shared_examples/models/issuable_link_shared_examples.rb b/spec/support/shared_examples/models/issuable_link_shared_examples.rb
index 9892e66b582..42c7be5ddc3 100644
--- a/spec/support/shared_examples/models/issuable_link_shared_examples.rb
+++ b/spec/support/shared_examples/models/issuable_link_shared_examples.rb
@@ -16,6 +16,7 @@ RSpec.shared_examples 'issuable link' do
it { is_expected.to validate_presence_of(:source) }
it { is_expected.to validate_presence_of(:target) }
+
it do
is_expected.to validate_uniqueness_of(:source)
.scoped_to(:target_id)
diff --git a/spec/support/shared_examples/models/member_shared_examples.rb b/spec/support/shared_examples/models/member_shared_examples.rb
index aa40a2c7135..287b046cbec 100644
--- a/spec/support/shared_examples/models/member_shared_examples.rb
+++ b/spec/support/shared_examples/models/member_shared_examples.rb
@@ -63,16 +63,23 @@ RSpec.shared_examples '#valid_level_roles' do |entity_name|
let(:entity) { create(entity_name) } # rubocop:disable Rails/SaveBang
let(:entity_member) { create("#{entity_name}_member", :developer, source: entity, user: member_user) }
let(:presenter) { described_class.new(entity_member, current_user: member_user) }
- let(:expected_roles) { { 'Developer' => 30, 'Maintainer' => 40, 'Reporter' => 20 } }
- it 'returns all roles when no parent member is present' do
- expect(presenter.valid_level_roles).to eq(entity_member.class.access_level_roles)
+ context 'when no parent member is present' do
+ let(:all_permissible_roles) { entity_member.class.permissible_access_level_roles(member_user, entity) }
+
+ it 'returns all permissible roles' do
+ expect(presenter.valid_level_roles).to eq(all_permissible_roles)
+ end
end
- it 'returns higher roles when a parent member is present' do
- group.add_reporter(member_user)
+ context 'when parent member is present' do
+ before do
+ group.add_reporter(member_user)
+ end
- expect(presenter.valid_level_roles).to eq(expected_roles)
+ it 'returns higher roles when a parent member is present' do
+ expect(presenter.valid_level_roles).to eq(expected_roles)
+ end
end
end
diff --git a/spec/support/shared_examples/models/project_shared_examples.rb b/spec/support/shared_examples/models/project_shared_examples.rb
index 475ac1da04b..0b880f00a22 100644
--- a/spec/support/shared_examples/models/project_shared_examples.rb
+++ b/spec/support/shared_examples/models/project_shared_examples.rb
@@ -25,3 +25,38 @@ RSpec.shared_examples 'returns true if project is inactive' do
end
end
end
+
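+# Shared example for a project-level feature-flag predicate. Assumes the including
+# spec defines `subject_project`, `feature_flag` (the flag name) and
+# `feature_flag_method` (the predicate called on the project), and that the
+# project sits under a group with a parent (root) group.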
+RSpec.shared_examples 'checks parent group feature flag' do
+ let(:group) { subject_project.group }
+ let(:root_group) { group.parent }
+
+ subject { subject_project.public_send(feature_flag_method) }
+
+ context 'when feature flag is disabled globally' do
+ before do
+ stub_feature_flags(feature_flag => false)
+ end
+
+ it { is_expected.to be_falsey }
+ end
+
+ context 'when feature flag is enabled globally' do
+ it { is_expected.to be_truthy }
+ end
+
+ context 'when feature flag is enabled for the root group' do
+ before do
+ stub_feature_flags(feature_flag => root_group)
+ end
+
+ it { is_expected.to be_truthy }
+ end
+
+ context 'when feature flag is enabled for the group' do
+ before do
+ stub_feature_flags(feature_flag => group)
+ end
+
+ it { is_expected.to be_truthy }
+ end
+end
diff --git a/spec/support/shared_examples/models/taskable_shared_examples.rb b/spec/support/shared_examples/models/taskable_shared_examples.rb
index 34b1d735bcd..3ae240c8da8 100644
--- a/spec/support/shared_examples/models/taskable_shared_examples.rb
+++ b/spec/support/shared_examples/models/taskable_shared_examples.rb
@@ -18,9 +18,9 @@ RSpec.shared_examples 'a Taskable' do
it 'returns the correct task status' do
expect(subject.task_status).to match('2 of')
- expect(subject.task_status).to match('5 tasks completed')
+ expect(subject.task_status).to match('5 checklist items completed')
expect(subject.task_status_short).to match('2/')
- expect(subject.task_status_short).to match('5 tasks')
+ expect(subject.task_status_short).to match('5 checklist items')
end
describe '#tasks?' do
@@ -53,9 +53,9 @@ RSpec.shared_examples 'a Taskable' do
it 'returns the correct task status' do
expect(subject.task_status).to match('3 of')
- expect(subject.task_status).to match('9 tasks completed')
+ expect(subject.task_status).to match('9 checklist items completed')
expect(subject.task_status_short).to match('3/')
- expect(subject.task_status_short).to match('9 tasks')
+ expect(subject.task_status_short).to match('9 checklist items')
end
end
@@ -68,9 +68,9 @@ RSpec.shared_examples 'a Taskable' do
it 'returns the correct task status' do
expect(subject.task_status).to match('0 of')
- expect(subject.task_status).to match('1 task completed')
+ expect(subject.task_status).to match('1 checklist item completed')
expect(subject.task_status_short).to match('0/')
- expect(subject.task_status_short).to match('1 task')
+ expect(subject.task_status_short).to match('1 checklist item')
end
end
@@ -87,9 +87,9 @@ RSpec.shared_examples 'a Taskable' do
it 'returns the correct task status' do
expect(subject.task_status).to match('0 of')
- expect(subject.task_status).to match('0 tasks completed')
+ expect(subject.task_status).to match('0 checklist items completed')
expect(subject.task_status_short).to match('0/')
- expect(subject.task_status_short).to match('0 task')
+ expect(subject.task_status_short).to match('0 checklist items')
end
end
@@ -102,9 +102,9 @@ RSpec.shared_examples 'a Taskable' do
it 'returns the correct task status' do
expect(subject.task_status).to match('1 of')
- expect(subject.task_status).to match('1 task completed')
+ expect(subject.task_status).to match('1 checklist item completed')
expect(subject.task_status_short).to match('1/')
- expect(subject.task_status_short).to match('1 task')
+ expect(subject.task_status_short).to match('1 checklist item')
end
end
@@ -123,9 +123,9 @@ RSpec.shared_examples 'a Taskable' do
it 'returns the correct task status' do
expect(subject.task_status).to match('2 of')
- expect(subject.task_status).to match('4 tasks completed')
+ expect(subject.task_status).to match('4 checklist items completed')
expect(subject.task_status_short).to match('2/')
- expect(subject.task_status_short).to match('4 tasks')
+ expect(subject.task_status_short).to match('4 checklist items')
end
end
end
diff --git a/spec/support/shared_examples/namespaces/traversal_scope_examples.rb b/spec/support/shared_examples/namespaces/traversal_scope_examples.rb
index 45da1d382c1..807295f8442 100644
--- a/spec/support/shared_examples/namespaces/traversal_scope_examples.rb
+++ b/spec/support/shared_examples/namespaces/traversal_scope_examples.rb
@@ -273,14 +273,6 @@ RSpec.shared_examples 'namespace traversal scopes' do
include_examples '.self_and_descendants'
end
-
- context 'with linear_scopes_superset feature flag disabled' do
- before do
- stub_feature_flags(linear_scopes_superset: false)
- end
-
- include_examples '.self_and_descendants'
- end
end
shared_examples '.self_and_descendant_ids' do
@@ -324,14 +316,6 @@ RSpec.shared_examples 'namespace traversal scopes' do
include_examples '.self_and_descendant_ids'
end
-
- context 'with linear_scopes_superset feature flag disabled' do
- before do
- stub_feature_flags(linear_scopes_superset: false)
- end
-
- include_examples '.self_and_descendant_ids'
- end
end
shared_examples '.self_and_hierarchy' do
diff --git a/spec/support/shared_examples/policies/group_project_namespace_policy_shared_examples.rb b/spec/support/shared_examples/policies/group_project_namespace_policy_shared_examples.rb
new file mode 100644
index 00000000000..9a1f0e685be
--- /dev/null
+++ b/spec/support/shared_examples/policies/group_project_namespace_policy_shared_examples.rb
@@ -0,0 +1,35 @@
+# frozen_string_literal: true
+
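+# This shared_example requires the following variables:
+# - `users_container`, the group or project used to grant the reporter role
+# - a policy as the spec `subject`, evaluated for `current_user`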
+RSpec.shared_examples 'checks timelog categories permissions' do
+ context 'with no user' do
+ let_it_be(:current_user) { nil }
+
+ it { is_expected.to be_disallowed(:read_timelog_category) }
+ end
+
+ context 'with a regular user' do
+ let_it_be(:current_user) { create(:user) }
+
+ it { is_expected.to be_disallowed(:read_timelog_category) }
+ end
+
+ context 'with a reporter user' do
+ let_it_be(:current_user) { create(:user) }
+
+ before do
+ users_container.add_reporter(current_user)
+ end
+
+ context 'when timelog_categories is enabled' do
+ it { is_expected.to be_allowed(:read_timelog_category) }
+ end
+
+ context 'when timelog_categories is disabled' do
+ before do
+ stub_feature_flags(timelog_categories: false)
+ end
+
+ it { is_expected.to be_disallowed(:read_timelog_category) }
+ end
+ end
+end
diff --git a/spec/support/shared_examples/requests/api/discussions_shared_examples.rb b/spec/support/shared_examples/requests/api/discussions_shared_examples.rb
index a12cb24a513..32562aef8d2 100644
--- a/spec/support/shared_examples/requests/api/discussions_shared_examples.rb
+++ b/spec/support/shared_examples/requests/api/discussions_shared_examples.rb
@@ -128,10 +128,10 @@ RSpec.shared_examples 'discussions API' do |parent_type, noteable_type, id_name,
stub_feature_flags(notes_create_service_tracking: false)
end
- it 'does not track any events', :snowplow do
+ it 'does not track Notes::CreateService events', :snowplow do
post api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/discussions"), params: { body: 'hi!' }
- expect_no_snowplow_event
+ expect_no_snowplow_event(category: 'Notes::CreateService', action: 'execute')
end
end
diff --git a/spec/support/shared_examples/requests/api/notes_shared_examples.rb b/spec/support/shared_examples/requests/api/notes_shared_examples.rb
index a59235486ec..8479493911b 100644
--- a/spec/support/shared_examples/requests/api/notes_shared_examples.rb
+++ b/spec/support/shared_examples/requests/api/notes_shared_examples.rb
@@ -376,13 +376,28 @@ RSpec.shared_examples 'noteable API with confidential notes' do |parent_type, no
post api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/notes", user), params: params
end
- it "creates a confidential note if confidential is set to true" do
- post api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/notes", user), params: params.merge(confidential: true)
+ context 'with internal param' do
+ it "creates a confidential note if internal is set to true" do
+ post api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/notes", user), params: params.merge(internal: true)
- expect(response).to have_gitlab_http_status(:created)
- expect(json_response['body']).to eq('hi!')
- expect(json_response['confidential']).to be_truthy
- expect(json_response['author']['username']).to eq(user.username)
+ expect(response).to have_gitlab_http_status(:created)
+ expect(json_response['body']).to eq('hi!')
+ expect(json_response['confidential']).to be_truthy
+ expect(json_response['internal']).to be_truthy
+ expect(json_response['author']['username']).to eq(user.username)
+ end
+ end
+
+ context 'with deprecated confidential param' do
+ it "creates a confidential note if confidential is set to true" do
+ post api("/#{parent_type}/#{parent.id}/#{noteable_type}/#{noteable[id_name]}/notes", user), params: params.merge(confidential: true)
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(json_response['body']).to eq('hi!')
+ expect(json_response['confidential']).to be_truthy
+ expect(json_response['internal']).to be_truthy
+ expect(json_response['author']['username']).to eq(user.username)
+ end
end
end
end
diff --git a/spec/support/shared_examples/requests/api/npm_packages_shared_examples.rb b/spec/support/shared_examples/requests/api/npm_packages_shared_examples.rb
index 8d6d85732be..b651ffc8996 100644
--- a/spec/support/shared_examples/requests/api/npm_packages_shared_examples.rb
+++ b/spec/support/shared_examples/requests/api/npm_packages_shared_examples.rb
@@ -244,7 +244,7 @@ RSpec.shared_examples 'handling get metadata requests' do |scope: :project|
let(:headers) do
case auth
when :oauth
- build_token_auth_header(token.token)
+ build_token_auth_header(token.plaintext_token)
when :personal_access_token
build_token_auth_header(personal_access_token.token)
when :job_token
@@ -404,7 +404,7 @@ RSpec.shared_examples 'handling get dist tags requests' do |scope: :project|
shared_examples 'handling all conditions' do
context 'with oauth token' do
- let(:headers) { build_token_auth_header(token.token) }
+ let(:headers) { build_token_auth_header(token.plaintext_token) }
it_behaves_like 'handling different package names, visibilities and user roles'
end
@@ -514,7 +514,7 @@ RSpec.shared_examples 'handling create dist tag requests' do |scope: :project|
shared_examples 'handling all conditions' do
context 'with oauth token' do
- let(:headers) { build_token_auth_header(token.token) }
+ let(:headers) { build_token_auth_header(token.plaintext_token) }
it_behaves_like 'handling different package names, visibilities and user roles'
end
@@ -622,7 +622,7 @@ RSpec.shared_examples 'handling delete dist tag requests' do |scope: :project|
shared_examples 'handling all conditions' do
context 'with oauth token' do
- let(:headers) { build_token_auth_header(token.token) }
+ let(:headers) { build_token_auth_header(token.plaintext_token) }
it_behaves_like 'handling different package names, visibilities and user roles'
end
diff --git a/spec/support/shared_examples/services/alert_management/alert_processing/alert_firing_shared_examples.rb b/spec/support/shared_examples/services/alert_management/alert_processing/alert_firing_shared_examples.rb
index ca86cb082a7..6cae7d8e00f 100644
--- a/spec/support/shared_examples/services/alert_management/alert_processing/alert_firing_shared_examples.rb
+++ b/spec/support/shared_examples/services/alert_management/alert_processing/alert_firing_shared_examples.rb
@@ -23,7 +23,7 @@ RSpec.shared_examples 'creates an alert management alert or errors' do
end
context 'and fails to save' do
- let(:errors) { double(messages: { hosts: ['hosts array is over 255 chars'] })}
+ let(:errors) { double(messages: { hosts: ['hosts array is over 255 chars'] }, '[]': []) }
before do
allow(service).to receive(:alert).and_call_original
@@ -35,9 +35,10 @@ RSpec.shared_examples 'creates an alert management alert or errors' do
it 'writes a warning to the log' do
expect(Gitlab::AppLogger).to receive(:warn).with(
- message: "Unable to create AlertManagement::Alert from #{source}",
+ message: "Unable to create AlertManagement::Alert",
project_id: project.id,
- alert_errors: { hosts: ['hosts array is over 255 chars'] }
+ alert_errors: { hosts: ['hosts array is over 255 chars'] },
+ alert_source: source
)
subject
@@ -45,6 +46,46 @@ RSpec.shared_examples 'creates an alert management alert or errors' do
end
end
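+# This shared_example requires the following variables:
+# - `project`, the project for the alerts
+# - `subject`, the service execution which attempts to save a new alert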
+RSpec.shared_examples 'handles race condition in alert creation' do
+ let(:other_alert) { create(:alert_management_alert, project: project) }
+
+ context 'when another alert is saved at the same time' do
+ before do
+ allow_next_instance_of(::AlertManagement::Alert) do |alert|
+ allow(alert).to receive(:save) do
+ other_alert.update!(fingerprint: alert.fingerprint)
+
+ raise ActiveRecord::RecordNotUnique
+ end
+ end
+ end
+
+ it 'finds the other alert and increments the counter' do
+ subject
+
+ expect(other_alert.reload.events).to eq(2)
+ end
+ end
+
+ context 'when another alert is saved before the validation runs' do
+ before do
+ allow_next_instance_of(::AlertManagement::Alert) do |alert|
+ allow(alert).to receive(:save).and_wrap_original do |method, *args|
+ other_alert.update!(fingerprint: alert.fingerprint)
+
+ method.call(*args)
+ end
+ end
+ end
+
+ it 'finds the other alert and increments the counter' do
+ subject
+
+ expect(other_alert.reload.events).to eq(2)
+ end
+ end
+end
+
# This shared_example requires the following variables:
# - last_alert_attributes, last created alert
# - project, project that alert created
diff --git a/spec/support/shared_examples/services/alert_management/alert_processing/alert_recovery_shared_examples.rb b/spec/support/shared_examples/services/alert_management/alert_processing/alert_recovery_shared_examples.rb
index f8e096297d3..eb9f76d8626 100644
--- a/spec/support/shared_examples/services/alert_management/alert_processing/alert_recovery_shared_examples.rb
+++ b/spec/support/shared_examples/services/alert_management/alert_processing/alert_recovery_shared_examples.rb
@@ -4,8 +4,6 @@
# - `alert`, the alert to be resolved
RSpec.shared_examples 'resolves an existing alert management alert' do
it 'sets the end time and status' do
- expect(Gitlab::AppLogger).not_to receive(:warn)
-
expect { subject }
.to change { alert.reload.resolved? }.to(true)
.and change { alert.ended_at.present? }.to(true)
@@ -22,36 +20,6 @@ RSpec.shared_examples 'does not change the alert end time' do
end
end
-# This shared_example requires the following variables:
-# - `project`, expected project for an incoming alert
-# - `service`, a service which includes AlertManagement::AlertProcessing
-# - `alert` (optional), the alert which should fail to resolve. If not
-# included, the log is expected to correspond to a new alert
-RSpec.shared_examples 'writes a warning to the log for a failed alert status update' do
- before do
- allow(service).to receive(:alert).and_call_original
- allow(service).to receive_message_chain(:alert, :resolve).and_return(false)
- end
-
- specify do
- expect(Gitlab::AppLogger).to receive(:warn).with(
- message: 'Unable to update AlertManagement::Alert status to resolved',
- project_id: project.id,
- alert_id: alert ? alert.id : (last_alert_id + 1)
- )
-
- # Failure to resolve a recovery alert is not a critical failure
- expect(subject).to be_success
- end
-
- private
-
- def last_alert_id
- AlertManagement::Alert.connection
- .select_value("SELECT nextval('#{AlertManagement::Alert.sequence_name}')")
- end
-end
-
RSpec.shared_examples 'processes recovery alert' do
context 'seen for the first time' do
let(:alert) { AlertManagement::Alert.last }
@@ -69,7 +37,6 @@ RSpec.shared_examples 'processes recovery alert' do
it_behaves_like 'creates expected system notes for alert', :recovery_alert, :resolve_alert
it_behaves_like 'sends alert notification emails if enabled'
it_behaves_like 'closes related incident if enabled'
- it_behaves_like 'writes a warning to the log for a failed alert status update'
it_behaves_like 'does not create an alert management alert'
it_behaves_like 'does not process incident issues'
@@ -83,7 +50,6 @@ RSpec.shared_examples 'processes recovery alert' do
it_behaves_like 'creates expected system notes for alert', :recovery_alert, :resolve_alert
it_behaves_like 'sends alert notification emails if enabled'
it_behaves_like 'closes related incident if enabled'
- it_behaves_like 'writes a warning to the log for a failed alert status update'
it_behaves_like 'does not create an alert management alert'
it_behaves_like 'does not process incident issues'
@@ -97,7 +63,6 @@ RSpec.shared_examples 'processes recovery alert' do
it_behaves_like 'creates expected system notes for alert', :recovery_alert, :resolve_alert
it_behaves_like 'sends alert notification emails if enabled'
it_behaves_like 'closes related incident if enabled'
- it_behaves_like 'writes a warning to the log for a failed alert status update'
it_behaves_like 'does not create an alert management alert'
it_behaves_like 'does not process incident issues'
diff --git a/spec/support/shared_examples/services/alert_management/alert_processing/incident_creation_shared_examples.rb b/spec/support/shared_examples/services/alert_management/alert_processing/incident_creation_shared_examples.rb
index 98834f01ce2..6becc3dc071 100644
--- a/spec/support/shared_examples/services/alert_management/alert_processing/incident_creation_shared_examples.rb
+++ b/spec/support/shared_examples/services/alert_management/alert_processing/incident_creation_shared_examples.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-# Expects usage of 'incident settings enabled' context.
+# Expects usage of 'incident management settings enabled' context.
#
# This shared_example includes the following option:
# - with_issue: includes a test for when the defined `alert` has an associated issue
@@ -8,7 +8,7 @@
# This shared_example requires the following variables:
# - `alert`, required if :with_issue is true
RSpec.shared_examples 'processes incident issues if enabled' do |with_issue: false|
- include_examples 'processes incident issues', with_issue
+ include_examples 'processes incident issues', with_issue: with_issue
context 'with incident setting disabled' do
let(:create_issue) { false }
diff --git a/spec/support/shared_examples/services/alert_management/alert_processing/incident_resolution_shared_examples.rb b/spec/support/shared_examples/services/alert_management/alert_processing/incident_resolution_shared_examples.rb
index 3add5485fca..1973577d742 100644
--- a/spec/support/shared_examples/services/alert_management/alert_processing/incident_resolution_shared_examples.rb
+++ b/spec/support/shared_examples/services/alert_management/alert_processing/incident_resolution_shared_examples.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-# Expects usage of 'incident settings enabled' context.
+# Expects usage of 'incident management settings enabled' context.
#
# This shared_example requires the following variables:
# - `alert`, alert for which related incidents should be closed
diff --git a/spec/support/shared_examples/services/alert_management/alert_processing/notifications_shared_examples.rb b/spec/support/shared_examples/services/alert_management/alert_processing/notifications_shared_examples.rb
index 5f30b58176b..92e7dee7533 100644
--- a/spec/support/shared_examples/services/alert_management/alert_processing/notifications_shared_examples.rb
+++ b/spec/support/shared_examples/services/alert_management/alert_processing/notifications_shared_examples.rb
@@ -1,11 +1,11 @@
# frozen_string_literal: true
-# Expects usage of 'incident settings enabled' context.
+# Expects usage of 'incident management settings enabled' context.
#
# This shared_example includes the following option:
# - count: number of notifications expected to be sent
RSpec.shared_examples 'sends alert notification emails if enabled' do |count: 1|
- include_examples 'sends alert notification emails', count
+ include_examples 'sends alert notification emails', count: count
context 'with email setting disabled' do
let(:send_email) { false }
diff --git a/spec/support/shared_examples/services/boards/lists_move_service_shared_examples.rb b/spec/support/shared_examples/services/boards/lists_move_service_shared_examples.rb
index bf84b912610..97d0bae3552 100644
--- a/spec/support/shared_examples/services/boards/lists_move_service_shared_examples.rb
+++ b/spec/support/shared_examples/services/boards/lists_move_service_shared_examples.rb
@@ -1,95 +1,103 @@
# frozen_string_literal: true
RSpec.shared_examples 'lists move service' do
- let!(:planning) { create(:list, board: board, position: 0) }
- let!(:development) { create(:list, board: board, position: 1) }
- let!(:review) { create(:list, board: board, position: 2) }
- let!(:staging) { create(:list, board: board, position: 3) }
- let!(:closed) { create(:closed_list, board: board) }
+ shared_examples 'correct movement behavior' do
+ context 'when list type is set to label' do
+ it 'does not reorder lists when new position is nil' do
+ service = described_class.new(parent, user, position: nil)
- context 'when list type is set to label' do
- it 'keeps position of lists when new position is nil' do
- service = described_class.new(parent, user, position: nil)
+ service.execute(planning)
- service.execute(planning)
+ expect(ordered_lists).to eq([planning, development, review, staging])
+ end
- expect(current_list_positions).to eq [0, 1, 2, 3]
- end
-
- it 'keeps position of lists when new position is equal to old position' do
- service = described_class.new(parent, user, position: planning.position)
+ it 'does not reorder lists when new position is equal to old position' do
+ service = described_class.new(parent, user, position: planning.position)
- service.execute(planning)
+ service.execute(planning)
- expect(current_list_positions).to eq [0, 1, 2, 3]
- end
+ expect(ordered_lists).to eq([planning, development, review, staging])
+ end
- it 'keeps position of lists when new position is negative' do
- service = described_class.new(parent, user, position: -1)
+ it 'does not reorder lists when new position is negative' do
+ service = described_class.new(parent, user, position: -1)
- service.execute(planning)
+ service.execute(planning)
- expect(current_list_positions).to eq [0, 1, 2, 3]
- end
+ expect(ordered_lists).to eq([planning, development, review, staging])
+ end
- it 'keeps position of lists when new position is equal to number of labels lists' do
- service = described_class.new(parent, user, position: board.lists.label.size)
+ it 'does not reorder lists when new position is bigger than the last position' do
+ service = described_class.new(parent, user, position: ordered_lists.last.position + 1)
- service.execute(planning)
+ service.execute(planning)
- expect(current_list_positions).to eq [0, 1, 2, 3]
- end
+ expect(ordered_lists).to eq([planning, development, review, staging])
+ end
- it 'keeps position of lists when new position is greater than number of labels lists' do
- service = described_class.new(parent, user, position: board.lists.label.size + 1)
+ it 'moves the list to the first position when new position is equal to first position' do
+ service = described_class.new(parent, user, position: 0)
- service.execute(planning)
+ service.execute(staging)
- expect(current_list_positions).to eq [0, 1, 2, 3]
- end
+ expect(ordered_lists).to eq([staging, planning, development, review])
+ end
- it 'increments position of intermediate lists when new position is equal to first position' do
- service = described_class.new(parent, user, position: 0)
+ it 'moves the list to the last position when new position is equal to last position' do
+ service = described_class.new(parent, user, position: board.lists.label.last.position)
- service.execute(staging)
+ service.execute(planning)
- expect(current_list_positions).to eq [1, 2, 3, 0]
- end
+ expect(ordered_lists).to eq([development, review, staging, planning])
+ end
- it 'decrements position of intermediate lists when new position is equal to last position' do
- service = described_class.new(parent, user, position: board.lists.label.last.position)
+ it 'moves the list to the correct position when new position is greater than old position (third list)' do
+ service = described_class.new(parent, user, position: review.position)
- service.execute(planning)
+ service.execute(planning)
- expect(current_list_positions).to eq [3, 0, 1, 2]
- end
+ expect(ordered_lists).to eq([development, review, planning, staging])
+ end
- it 'decrements position of intermediate lists when new position is greater than old position' do
- service = described_class.new(parent, user, position: 2)
+ it 'moves the list to the correct position when new position is lower than old position (second list)' do
+ service = described_class.new(parent, user, position: development.position)
- service.execute(planning)
+ service.execute(staging)
- expect(current_list_positions).to eq [2, 0, 1, 3]
+ expect(ordered_lists).to eq([planning, staging, development, review])
+ end
end
- it 'increments position of intermediate lists when new position is lower than old position' do
- service = described_class.new(parent, user, position: 1)
+ it 'keeps position of lists when list type is closed' do
+ service = described_class.new(parent, user, position: 2)
- service.execute(staging)
+ service.execute(closed)
- expect(current_list_positions).to eq [0, 2, 3, 1]
+ expect(ordered_lists).to eq([planning, development, review, staging])
end
end
- it 'keeps position of lists when list type is closed' do
- service = described_class.new(parent, user, position: 2)
+ context 'with complete position sequence' do
+ let!(:planning) { create(:list, board: board, position: 0) }
+ let!(:development) { create(:list, board: board, position: 1) }
+ let!(:review) { create(:list, board: board, position: 2) }
+ let!(:staging) { create(:list, board: board, position: 3) }
+ let!(:closed) { create(:closed_list, board: board) }
+
+ it_behaves_like 'correct movement behavior'
+ end
- service.execute(closed)
+ context 'with corrupted position sequence' do
+ let!(:planning) { create(:list, board: board, position: 0) }
+ let!(:staging) { create(:list, board: board, position: 6) }
+ let!(:development) { create(:list, board: board, position: 1) }
+ let!(:review) { create(:list, board: board, position: 4) }
+ let!(:closed) { create(:closed_list, board: board) }
- expect(current_list_positions).to eq [0, 1, 2, 3]
+ it_behaves_like 'correct movement behavior'
end
- def current_list_positions
- [planning, development, review, staging].map { |list| list.reload.position }
+ def ordered_lists
+ board.lists.where.not(position: nil)
end
end
diff --git a/spec/support/shared_examples/services/issuable_shared_examples.rb b/spec/support/shared_examples/services/issuable_shared_examples.rb
index a50a386afe1..142d4ae8531 100644
--- a/spec/support/shared_examples/services/issuable_shared_examples.rb
+++ b/spec/support/shared_examples/services/issuable_shared_examples.rb
@@ -45,7 +45,7 @@ RSpec.shared_examples 'updating a single task' do
end
it 'creates system note about task status change' do
- note1 = find_note('marked the task **Task 1** as completed')
+ note1 = find_note('marked the checklist item **Task 1** as completed')
expect(note1).not_to be_nil
@@ -61,7 +61,7 @@ RSpec.shared_examples 'updating a single task' do
end
it 'creates system note about task status change' do
- note1 = find_note('marked the task **Task 2** as incomplete')
+ note1 = find_note('marked the checklist item **Task 2** as incomplete')
expect(note1).not_to be_nil
@@ -92,7 +92,7 @@ RSpec.shared_examples 'updating a single task' do
end
it 'creates system note about task status change' do
- note1 = find_note('marked the task **Task 2** as incomplete')
+ note1 = find_note('marked the checklist item **Task 2** as incomplete')
expect(note1).not_to be_nil
diff --git a/spec/support/shared_examples/services/packages_shared_examples.rb b/spec/support/shared_examples/services/packages_shared_examples.rb
index 6bc4f171d9c..704a4bbe0b8 100644
--- a/spec/support/shared_examples/services/packages_shared_examples.rb
+++ b/spec/support/shared_examples/services/packages_shared_examples.rb
@@ -81,6 +81,26 @@ RSpec.shared_examples 'returns packages' do |container_type, user_type|
end
end
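+# This shared_example requires the following variables:
+# - `user`, the user added to the container with the given role
+# - `single_package_schema`, the response schema the package payload must match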
+RSpec.shared_examples 'returns package' do |container_type, user_type|
+ context "for #{user_type}" do
+ before do
+ send(container_type)&.send("add_#{user_type}", user) unless user_type == :no_type
+ end
+
+ it 'returns success response' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:success)
+ end
+
+ it 'returns a valid response schema' do
+ subject
+
+ expect(response).to match_response_schema(single_package_schema)
+ end
+ end
+end
+
RSpec.shared_examples 'returns packages with subgroups' do |container_type, user_type|
context "with subgroups for #{user_type}" do
before do
diff --git a/spec/support/shared_examples/services/snowplow_tracking_shared_examples.rb b/spec/support/shared_examples/services/snowplow_tracking_shared_examples.rb
new file mode 100644
index 00000000000..0687be6f429
--- /dev/null
+++ b/spec/support/shared_examples/services/snowplow_tracking_shared_examples.rb
@@ -0,0 +1,11 @@
+# frozen_string_literal: true
+
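+# This shared_example requires the following variables:
+# - `project`, used to derive the expected namespace for the 'Snowplow event tracking' examples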
+shared_examples 'issue_edit snowplow tracking' do
+ let(:category) { Gitlab::UsageDataCounters::IssueActivityUniqueCounter::ISSUE_CATEGORY }
+ let(:action) { Gitlab::UsageDataCounters::IssueActivityUniqueCounter::ISSUE_ACTION }
+ let(:label) { Gitlab::UsageDataCounters::IssueActivityUniqueCounter::ISSUE_LABEL }
+ let(:namespace) { project.namespace }
+ let(:feature_flag_name) { :route_hll_to_snowplow_phase2 }
+
+ it_behaves_like 'Snowplow event tracking'
+end
diff --git a/spec/support/shared_examples/services/timelogs/create_service_shared_examples.rb b/spec/support/shared_examples/services/timelogs/create_service_shared_examples.rb
new file mode 100644
index 00000000000..53c42ec0e00
--- /dev/null
+++ b/spec/support/shared_examples/services/timelogs/create_service_shared_examples.rb
@@ -0,0 +1,89 @@
+# frozen_string_literal: true
+
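+# This shared_example requires the following variables:
+# - `issuable`, the record the timelog is created for
+# - `author`, a user who can be granted the reporter role
+# - `users_container`, the group or project used to grant that role
+# - `time_spent` and `summary`, the expected timelog attributes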
+RSpec.shared_examples 'issuable supports timelog creation service' do
+ shared_examples 'success_response' do
+ it 'successfully saves the timelog' do
+ is_expected.to be_success
+
+ timelog = subject.payload[:timelog]
+
+ expect(timelog).to be_persisted
+ expect(timelog.time_spent).to eq(time_spent)
+ expect(timelog.spent_at).to eq('Fri, 08 Jul 2022 00:00:00.000000000 UTC +00:00')
+ expect(timelog.summary).to eq(summary)
+ expect(timelog.issuable).to eq(issuable)
+ end
+ end
+
+ context 'when the user does not have permission' do
+ let(:user) { create(:user) }
+
+ it 'returns an error' do
+ is_expected.to be_error
+
+ expect(subject.message).to eq(
+ "#{issuable.base_class_name} doesn't exist or you don't have permission to add timelog to it.")
+ expect(subject.http_status).to eq(404)
+ end
+ end
+
+ context 'when the user has permissions' do
+ let(:user) { author }
+
+ before do
+ users_container.add_reporter(user)
+ end
+
+ context 'when the timelog save fails' do
+ before do
+ allow_next_instance_of(Timelog) do |timelog|
+ allow(timelog).to receive(:save).and_return(false)
+ end
+ end
+
+ it 'returns an error' do
+ is_expected.to be_error
+ expect(subject.message).to eq('Failed to save timelog')
+ end
+ end
+
+ context 'when the creation completes successfully' do
+ it_behaves_like 'success_response'
+ end
+ end
+end
+
+RSpec.shared_examples 'issuable does not support timelog creation service' do
+ shared_examples 'error_response' do
+ it 'returns an error' do
+ is_expected.to be_error
+
+ issuable_type = if issuable.nil?
+ 'Issuable'
+ else
+ issuable.base_class_name
+ end
+
+ expect(subject.message).to eq(
+ "#{issuable_type} doesn't exist or you don't have permission to add timelog to it."
+ )
+ expect(subject.http_status).to eq(404)
+ end
+ end
+
+ context 'when the user does not have permission' do
+ let(:user) { create(:user) }
+
+ it_behaves_like 'error_response'
+ end
+
+ context 'when the user has permissions' do
+ let(:user) { author }
+
+ before do
+ users_container.add_reporter(user)
+ end
+
+ it_behaves_like 'error_response'
+ end
+end
diff --git a/spec/support/shared_examples/services/work_items/create_task_shared_examples.rb b/spec/support/shared_examples/services/work_items/create_task_shared_examples.rb
new file mode 100644
index 00000000000..7771e7f0e21
--- /dev/null
+++ b/spec/support/shared_examples/services/work_items/create_task_shared_examples.rb
@@ -0,0 +1,16 @@
+# frozen_string_literal: true
+
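+# This shared_example requires the following variables:
+# - `params`, the creation params hash whose `:title` is overridden
+# - `subject`, the service execution which creates the work item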
+RSpec.shared_examples 'title with extra spaces' do
+ context 'when title has extra spaces' do
+ before do
+ params[:title] = " Awesome work item "
+ end
+
+ it 'removes extra leading and trailing whitespaces from title' do
+ subject
+
+ created_work_item = WorkItem.last
+ expect(created_work_item.title).to eq('Awesome work item')
+ end
+ end
+end
diff --git a/spec/support/shared_examples/workers/batched_background_migration_worker_shared_examples.rb b/spec/support/shared_examples/workers/batched_background_migration_worker_shared_examples.rb
index 1da21633504..3ba5f080a01 100644
--- a/spec/support/shared_examples/workers/batched_background_migration_worker_shared_examples.rb
+++ b/spec/support/shared_examples/workers/batched_background_migration_worker_shared_examples.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-RSpec.shared_examples 'it runs batched background migration jobs' do |tracking_database|
+RSpec.shared_examples 'it runs batched background migration jobs' do |tracking_database, table_name|
include ExclusiveLeaseHelpers
describe 'defining the job attributes' do
@@ -136,8 +136,10 @@ RSpec.shared_examples 'it runs batched background migration jobs' do |tracking_d
let(:job_interval) { 5.minutes }
let(:lease_timeout) { 15.minutes }
let(:lease_key) { described_class.name.demodulize.underscore }
- let(:migration) { build(:batched_background_migration, :active, interval: job_interval) }
let(:interval_variance) { described_class::INTERVAL_VARIANCE }
+ let(:migration) do
+ build(:batched_background_migration, :active, interval: job_interval, table_name: table_name)
+ end
before do
allow(Gitlab::Database::BackgroundMigration::BatchedMigration).to receive(:active_migration)
@@ -233,7 +235,9 @@ RSpec.shared_examples 'it runs batched background migration jobs' do |tracking_d
let(:migration_class) do
Class.new(Gitlab::BackgroundMigration::BatchedMigrationJob) do
- def perform(matching_status)
+ job_arguments :matching_status
+
+ def perform
each_sub_batch(
operation_name: :update_all,
batching_scope: -> (relation) { relation.where(status: matching_status) }
@@ -249,7 +253,7 @@ RSpec.shared_examples 'it runs batched background migration jobs' do |tracking_d
create(
:batched_background_migration,
:active,
- table_name: table_name,
+ table_name: new_table_name,
column_name: :id,
max_value: migration_records,
batch_size: batch_size,
@@ -261,14 +265,14 @@ RSpec.shared_examples 'it runs batched background migration jobs' do |tracking_d
end
let(:base_model) { Gitlab::Database.database_base_models[tracking_database] }
- let(:table_name) { 'example_data' }
+ let(:new_table_name) { '_test_example_data' }
let(:batch_size) { 5 }
let(:sub_batch_size) { 2 }
let(:number_of_batches) { 10 }
let(:migration_records) { batch_size * number_of_batches }
let(:connection) { Gitlab::Database.database_base_models[tracking_database].connection }
- let(:example_data) { define_batchable_model(table_name, connection: connection) }
+ let(:example_data) { define_batchable_model(new_table_name, connection: connection) }
around do |example|
Gitlab::Database::SharedModel.using_connection(connection) do
@@ -283,16 +287,16 @@ RSpec.shared_examples 'it runs batched background migration jobs' do |tracking_d
# - one record beyond the migration's range
# - one record that doesn't match the migration job's batch condition
connection.execute(<<~SQL)
- CREATE TABLE #{table_name} (
+ CREATE TABLE #{new_table_name} (
id integer primary key,
some_column integer,
status smallint);
- INSERT INTO #{table_name} (id, some_column, status)
+ INSERT INTO #{new_table_name} (id, some_column, status)
SELECT generate_series, generate_series, 1
FROM generate_series(1, #{migration_records + 1});
- UPDATE #{table_name}
+ UPDATE #{new_table_name}
SET status = 0
WHERE some_column = #{migration_records - 5};
SQL
@@ -362,6 +366,15 @@ RSpec.shared_examples 'it runs batched background migration jobs' do |tracking_d
expect { migration_run }.to change { migration.reload.on_hold? }.from(false).to(true)
end
+
+ it 'puts migration on hold when the pending WAL count is above the limit' do
+ sql = Gitlab::Database::BackgroundMigration::HealthStatus::Indicators::WriteAheadLog::PENDING_WAL_COUNT_SQL
+ limit = Gitlab::Database::BackgroundMigration::HealthStatus::Indicators::WriteAheadLog::LIMIT
+
+ expect(connection).to receive(:execute).with(sql).and_return([{ 'pending_wal_count' => limit + 1 }])
+
+ expect { migration_run }.to change { migration.reload.on_hold? }.from(false).to(true)
+ end
end
end
end
diff --git a/spec/support_specs/database/prevent_cross_joins_spec.rb b/spec/support_specs/database/prevent_cross_joins_spec.rb
index efeabd15b58..5a80d0c0203 100644
--- a/spec/support_specs/database/prevent_cross_joins_spec.rb
+++ b/spec/support_specs/database/prevent_cross_joins_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Database::PreventCrossJoins do
+RSpec.describe Database::PreventCrossJoins, :suppress_gitlab_schemas_validate_connection do
context 'when running in a default scope' do
context 'when only non-CI tables are used' do
it 'does not raise exception' do
diff --git a/spec/support_specs/helpers/redis_commands/recorder_spec.rb b/spec/support_specs/helpers/redis_commands/recorder_spec.rb
new file mode 100644
index 00000000000..6f93ed2fcf0
--- /dev/null
+++ b/spec/support_specs/helpers/redis_commands/recorder_spec.rb
@@ -0,0 +1,128 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe RedisCommands::Recorder, :use_clean_rails_redis_caching do
+ subject(:recorder) { described_class.new(pattern: pattern) }
+
+ let(:cache) { Rails.cache }
+ let(:pattern) { nil }
+
+ describe '#initialize' do
+ context 'with a block' do
+ it 'records Redis commands' do
+ recorder = described_class.new { cache.read('key1') }
+
+ expect(recorder.log).to include([:get, 'cache:gitlab:key1'])
+ end
+ end
+
+ context 'without block' do
+ it 'only initializes the recorder' do
+ recorder = described_class.new
+
+ expect(recorder.log).to eq([])
+ end
+ end
+ end
+
+ describe '#record' do
+ it 'records Redis commands' do
+ recorder.record do
+ cache.write('key1', '1')
+ cache.read('key1')
+ cache.read('key2')
+ cache.delete('key1')
+ end
+
+ expect(recorder.log).to include([:set, 'cache:gitlab:key1', anything])
+ expect(recorder.log).to include([:get, 'cache:gitlab:key1'])
+ expect(recorder.log).to include([:get, 'cache:gitlab:key2'])
+ expect(recorder.log).to include([:del, 'cache:gitlab:key1'])
+ end
+
+ it 'does not record commands before the call' do
+ cache.write('key1', 1)
+
+ recorder.record do
+ cache.read('key1')
+ end
+
+ expect(recorder.log).not_to include([:set, anything, anything])
+ expect(recorder.log).to include([:get, 'cache:gitlab:key1'])
+ end
+
+ it 'refreshes recording after reinitialization' do
+ cache.read('key1')
+
+ recorder1 = described_class.new
+ recorder1.record do
+ cache.read('key2')
+ end
+
+ recorder2 = described_class.new
+
+ cache.read('key3')
+
+ recorder2.record do
+ cache.read('key4')
+ end
+
+ expect(recorder1.log).to include([:get, 'cache:gitlab:key2'])
+ expect(recorder1.log).not_to include([:get, 'cache:gitlab:key1'])
+ expect(recorder1.log).not_to include([:get, 'cache:gitlab:key3'])
+ expect(recorder1.log).not_to include([:get, 'cache:gitlab:key4'])
+
+ expect(recorder2.log).to include([:get, 'cache:gitlab:key4'])
+ expect(recorder2.log).not_to include([:get, 'cache:gitlab:key1'])
+ expect(recorder2.log).not_to include([:get, 'cache:gitlab:key2'])
+ expect(recorder2.log).not_to include([:get, 'cache:gitlab:key3'])
+ end
+ end
+
+ describe 'Pattern recording' do
+ let(:pattern) { 'key1' }
+
+ it 'records only matching keys' do
+ recorder.record do
+ cache.write('key1', '1')
+ cache.read('key2')
+ cache.read('key1')
+ cache.delete('key2')
+ end
+
+ expect(recorder.log).to include([:set, 'cache:gitlab:key1', anything])
+ expect(recorder.log).to include([:get, 'cache:gitlab:key1'])
+ expect(recorder.log).not_to include([:get, 'cache:gitlab:key2'])
+ expect(recorder.log).not_to include([:del, 'cache:gitlab:key2'])
+ end
+ end
+
+ describe '#by_command' do
+ it 'returns only matching commands' do
+ recorder.record do
+ cache.write('key1', '1')
+ cache.read('key2')
+ cache.read('key1')
+ cache.delete('key2')
+ end
+
+ expect(recorder.by_command(:del)).to match_array([[:del, 'cache:gitlab:key2']])
+ end
+ end
+
+ describe '#count' do
+ it 'returns the number of recorded commands' do
+ cache.read 'warmup'
+
+ recorder.record do
+ cache.write('key1', '1')
+ cache.read('key2')
+ cache.read('key1')
+ cache.delete('key2')
+ end
+
+ expect(recorder.count).to eq(4)
+ end
+ end
+end
diff --git a/spec/tasks/dev_rake_spec.rb b/spec/tasks/dev_rake_spec.rb
index 14a5ccfa323..a09756b862e 100644
--- a/spec/tasks/dev_rake_spec.rb
+++ b/spec/tasks/dev_rake_spec.rb
@@ -9,6 +9,7 @@ RSpec.describe 'dev rake tasks' do
Rake.application.rake_require 'tasks/dev'
Rake.application.rake_require 'active_record/railties/databases'
Rake.application.rake_require 'tasks/gitlab/db'
+ Rake.application.rake_require 'tasks/seed_fu'
end
describe 'setup' do
@@ -38,6 +39,30 @@ RSpec.describe 'dev rake tasks' do
end
end
+ describe 'fixtures:load' do
+ subject(:load_task) { run_rake_task('dev:fixtures:load', task_param) }
+
+ context 'by name' do
+ let(:task_param) { ['fixture_name'] }
+
+ it 'loads fixture' do
+ expect(Rake::Task['db:seed_fu']).to receive(:invoke)
+
+ load_task
+ end
+ end
+
+ context 'by empty name' do
+ let(:task_param) { '' }
+
+ it 'does not load fixture' do
+ expect(Rake::Task['db:seed_fu']).not_to receive(:invoke)
+
+ expect { load_task }.to output(/No fixture name was provided/).to_stdout
+ end
+ end
+ end
+
describe 'load' do
subject(:load_task) { run_rake_task('dev:load') }
diff --git a/spec/tasks/gitlab/background_migrations_rake_spec.rb b/spec/tasks/gitlab/background_migrations_rake_spec.rb
index bbd33f71e60..d8ce00a65e6 100644
--- a/spec/tasks/gitlab/background_migrations_rake_spec.rb
+++ b/spec/tasks/gitlab/background_migrations_rake_spec.rb
@@ -2,7 +2,7 @@
require 'rake_helper'
-RSpec.describe 'gitlab:background_migrations namespace rake tasks' do
+RSpec.describe 'gitlab:background_migrations namespace rake tasks', :suppress_gitlab_schemas_validate_connection do
before do
Rake.application.rake_require 'tasks/gitlab/background_migrations'
end
@@ -155,7 +155,7 @@ RSpec.describe 'gitlab:background_migrations namespace rake tasks' do
context 'with multiple databases' do
subject(:status_task) { run_rake_task('gitlab:background_migrations:status') }
- let(:base_models) { { 'main' => main_model, 'ci' => ci_model } }
+ let(:base_models) { { main: main_model, ci: ci_model } }
let(:main_model) { double(:model, connection: connection) }
let(:ci_model) { double(:model, connection: connection) }
diff --git a/spec/tasks/gitlab/backup_rake_spec.rb b/spec/tasks/gitlab/backup_rake_spec.rb
index 9e914f8202e..dc112b885ae 100644
--- a/spec/tasks/gitlab/backup_rake_spec.rb
+++ b/spec/tasks/gitlab/backup_rake_spec.rb
@@ -5,7 +5,11 @@ require 'rake_helper'
RSpec.describe 'gitlab:app namespace rake task', :delete do
let(:enable_registry) { true }
let(:backup_tasks) { %w{db repo uploads builds artifacts pages lfs terraform_state registry packages} }
- let(:backup_types) { %w{db repositories uploads builds artifacts pages lfs terraform_state registry packages} }
+ let(:backup_types) do
+ %w{main_db repositories uploads builds artifacts pages lfs terraform_state registry packages}.tap do |array|
+ array.insert(1, 'ci_db') if Gitlab::Database.has_config?(:ci)
+ end
+ end
def tars_glob
Dir.glob(File.join(Gitlab.config.backup.path, '*_gitlab_backup.tar'))
@@ -151,7 +155,8 @@ RSpec.describe 'gitlab:app namespace rake task', :delete do
describe 'backup' do
before do
# This reconnect makes our project fixture disappear, breaking the restore. Stub it out.
- allow(ActiveRecord::Base.connection).to receive(:reconnect!)
+ allow(ApplicationRecord.connection).to receive(:reconnect!)
+ allow(Ci::ApplicationRecord.connection).to receive(:reconnect!)
end
let!(:project) { create(:project, :repository) }
@@ -199,7 +204,9 @@ RSpec.describe 'gitlab:app namespace rake task', :delete do
end
it 'logs the progress to log file' do
- expect(Gitlab::BackupLogger).to receive(:info).with(message: "Dumping database ... [SKIPPED]")
+ ci_database_status = Gitlab::Database.has_config?(:ci) ? "[SKIPPED]" : "[DISABLED]"
+ expect(Gitlab::BackupLogger).to receive(:info).with(message: "Dumping main_database ... [SKIPPED]")
+ expect(Gitlab::BackupLogger).to receive(:info).with(message: "Dumping ci_database ... #{ci_database_status}")
expect(Gitlab::BackupLogger).to receive(:info).with(message: "Dumping repositories ... ")
expect(Gitlab::BackupLogger).to receive(:info).with(message: "Dumping repositories ... done")
expect(Gitlab::BackupLogger).to receive(:info).with(message: "Dumping uploads ... ")
diff --git a/spec/tasks/gitlab/db/decomposition/rollback/bump_ci_sequences_rake_spec.rb b/spec/tasks/gitlab/db/decomposition/rollback/bump_ci_sequences_rake_spec.rb
index 29b80176ef8..b03e964ce87 100644
--- a/spec/tasks/gitlab/db/decomposition/rollback/bump_ci_sequences_rake_spec.rb
+++ b/spec/tasks/gitlab/db/decomposition/rollback/bump_ci_sequences_rake_spec.rb
@@ -2,7 +2,8 @@
require 'rake_helper'
-RSpec.describe 'gitlab:db:decomposition:rollback:bump_ci_sequences', :silence_stdout do
+RSpec.describe 'gitlab:db:decomposition:rollback:bump_ci_sequences', :silence_stdout,
+ :suppress_gitlab_schemas_validate_connection do
before :all do
Rake.application.rake_require 'tasks/gitlab/db/decomposition/rollback/bump_ci_sequences'
diff --git a/spec/tasks/gitlab/db/lock_writes_rake_spec.rb b/spec/tasks/gitlab/db/lock_writes_rake_spec.rb
index 034c520887e..d03e15224cb 100644
--- a/spec/tasks/gitlab/db/lock_writes_rake_spec.rb
+++ b/spec/tasks/gitlab/db/lock_writes_rake_spec.rb
@@ -2,7 +2,8 @@
require 'rake_helper'
-RSpec.describe 'gitlab:db:lock_writes', :silence_stdout, :reestablished_active_record_base do
+RSpec.describe 'gitlab:db:lock_writes', :silence_stdout, :reestablished_active_record_base,
+ :suppress_gitlab_schemas_validate_connection do
before :all do
Rake.application.rake_require 'active_record/railties/databases'
Rake.application.rake_require 'tasks/seed_fu'
@@ -48,26 +49,6 @@ RSpec.describe 'gitlab:db:lock_writes', :silence_stdout, :reestablished_active_r
end
context 'when locking writes' do
- it 'adds 3 triggers to the ci schema tables on the main database' do
- expect do
- run_rake_task('gitlab:db:lock_writes')
- end.to change {
- number_of_triggers_on(main_connection, Ci::Build.table_name)
- }.by(3) # Triggers to block INSERT / UPDATE / DELETE
- # Triggers on TRUNCATE are not added to the information_schema.triggers
- # See https://www.postgresql.org/message-id/16934.1568989957%40sss.pgh.pa.us
- end
-
- it 'adds 3 triggers to the main schema tables on the ci database' do
- expect do
- run_rake_task('gitlab:db:lock_writes')
- end.to change {
- number_of_triggers_on(ci_connection, Project.table_name)
- }.by(3) # Triggers to block INSERT / UPDATE / DELETE
- # Triggers on TRUNCATE are not added to the information_schema.triggers
- # See https://www.postgresql.org/message-id/16934.1568989957%40sss.pgh.pa.us
- end
-
it 'still allows writes on the tables with the correct connections' do
Project.update_all(updated_at: Time.now)
Ci::Build.update_all(updated_at: Time.now)
@@ -106,30 +87,22 @@ RSpec.describe 'gitlab:db:lock_writes', :silence_stdout, :reestablished_active_r
main_connection.execute("truncate ci_build_needs")
end.to raise_error(ActiveRecord::StatementInvalid, /Table: "ci_build_needs" is write protected/)
end
+ end
- it 'retries again if it receives a statement_timeout a few number of times' do
- error_message = "PG::QueryCanceled: ERROR: canceling statement due to statement timeout"
- call_count = 0
- allow(main_connection).to receive(:execute) do |statement|
- if statement.include?("CREATE TRIGGER")
- call_count += 1
- raise(ActiveRecord::QueryCanceled, error_message) if call_count.even?
- end
- end
- run_rake_task('gitlab:db:lock_writes')
+ context 'multiple shared databases' do
+ before do
+ allow(::Gitlab::Database).to receive(:db_config_share_with).and_return(nil)
+ ci_db_config = Ci::ApplicationRecord.connection_db_config
+ allow(::Gitlab::Database).to receive(:db_config_share_with).with(ci_db_config).and_return('main')
end
- it 'raises the exception if it happened many times' do
- error_message = "PG::QueryCanceled: ERROR: canceling statement due to statement timeout"
- allow(main_connection).to receive(:execute) do |statement|
- if statement.include?("CREATE TRIGGER")
- raise(ActiveRecord::QueryCanceled, error_message)
- end
- end
+ it 'does not lock any tables if the ci database is shared with main database' do
+ run_rake_task('gitlab:db:lock_writes')
expect do
- run_rake_task('gitlab:db:lock_writes')
- end.to raise_error(ActiveRecord::QueryCanceled)
+ ApplicationRecord.connection.execute("delete from ci_builds")
+ Ci::ApplicationRecord.connection.execute("delete from users")
+ end.not_to raise_error
end
end
@@ -138,26 +111,8 @@ RSpec.describe 'gitlab:db:lock_writes', :silence_stdout, :reestablished_active_r
run_rake_task('gitlab:db:lock_writes')
end
- it 'removes the write protection triggers from the gitlab_main tables on the ci database' do
- expect do
- run_rake_task('gitlab:db:unlock_writes')
- end.to change {
- number_of_triggers_on(ci_connection, Project.table_name)
- }.by(-3) # Triggers to block INSERT / UPDATE / DELETE
- # Triggers on TRUNCATE are not added to the information_schema.triggers
- # See https://www.postgresql.org/message-id/16934.1568989957%40sss.pgh.pa.us
-
- expect do
- ci_connection.execute("delete from projects")
- end.not_to raise_error
- end
-
- it 'removes the write protection triggers from the gitlab_ci tables on the main database' do
- expect do
- run_rake_task('gitlab:db:unlock_writes')
- end.to change {
- number_of_triggers_on(main_connection, Ci::Build.table_name)
- }.by(-3)
+ it 'allows writes again on the gitlab_ci tables on the main database' do
+ run_rake_task('gitlab:db:unlock_writes')
expect do
main_connection.execute("delete from ci_builds")
@@ -169,9 +124,4 @@ RSpec.describe 'gitlab:db:lock_writes', :silence_stdout, :reestablished_active_r
def number_of_triggers(connection)
connection.select_value("SELECT count(*) FROM information_schema.triggers")
end
-
- def number_of_triggers_on(connection, table_name)
- connection
- .select_value("SELECT count(*) FROM information_schema.triggers WHERE event_object_table=$1", nil, [table_name])
- end
end
diff --git a/spec/tasks/gitlab/db/validate_config_rake_spec.rb b/spec/tasks/gitlab/db/validate_config_rake_spec.rb
index 03d7504e8b1..ad15c7f0d1c 100644
--- a/spec/tasks/gitlab/db/validate_config_rake_spec.rb
+++ b/spec/tasks/gitlab/db/validate_config_rake_spec.rb
@@ -2,7 +2,7 @@
require 'rake_helper'
-RSpec.describe 'gitlab:db:validate_config', :silence_stdout do
+RSpec.describe 'gitlab:db:validate_config', :silence_stdout, :suppress_gitlab_schemas_validate_connection do
# We don't need to delete this data since it only modifies `ar_internal_metadata`
# which would not be cleaned either by `DbCleaner`
self.use_transactional_tests = false
diff --git a/spec/tasks/gitlab/db_rake_spec.rb b/spec/tasks/gitlab/db_rake_spec.rb
index 74bec406947..8f8178cde4d 100644
--- a/spec/tasks/gitlab/db_rake_spec.rb
+++ b/spec/tasks/gitlab/db_rake_spec.rb
@@ -370,7 +370,7 @@ RSpec.describe 'gitlab:db namespace rake task', :silence_stdout do
it 'outputs changed message for automation after operations happen' do
allow(ActiveRecord::Base.connection.schema_migration).to receive(:table_exists?).and_return(schema_migration_table_exists)
allow_any_instance_of(ActiveRecord::MigrationContext).to receive(:needs_migration?).and_return(needs_migrations)
- expect { run_rake_task('gitlab:db:unattended') }. to output(/^#{rake_output}$/).to_stdout
+ expect { run_rake_task('gitlab:db:unattended') }.to output(/^#{rake_output}$/).to_stdout
end
end
end
diff --git a/spec/tasks/gitlab/gitaly_rake_spec.rb b/spec/tasks/gitlab/gitaly_rake_spec.rb
index 70c7ddb1d6e..e57021f749b 100644
--- a/spec/tasks/gitlab/gitaly_rake_spec.rb
+++ b/spec/tasks/gitlab/gitaly_rake_spec.rb
@@ -66,7 +66,7 @@ RSpec.describe 'gitlab:gitaly namespace rake task', :silence_stdout do
.with(%w[which gmake])
.and_return(['/usr/bin/gmake', 0])
expect(Gitlab::Popen).to receive(:popen)
- .with(%w[gmake all git], nil, { "BUNDLE_GEMFILE" => nil, "RUBYOPT" => nil })
+ .with(%w[gmake clean-build all git], nil, { "BUNDLE_GEMFILE" => nil, "RUBYOPT" => nil })
.and_return(['ok', 0])
subject
@@ -78,7 +78,7 @@ RSpec.describe 'gitlab:gitaly namespace rake task', :silence_stdout do
.with(%w[which gmake])
.and_return(['/usr/bin/gmake', 0])
expect(Gitlab::Popen).to receive(:popen)
- .with(%w[gmake all git], nil, { "BUNDLE_GEMFILE" => nil, "RUBYOPT" => nil })
+ .with(%w[gmake clean-build all git], nil, { "BUNDLE_GEMFILE" => nil, "RUBYOPT" => nil })
.and_return(['output', 1])
expect { subject }.to raise_error /Gitaly failed to compile: output/
@@ -95,14 +95,14 @@ RSpec.describe 'gitlab:gitaly namespace rake task', :silence_stdout do
it 'calls make in the gitaly directory' do
expect(Gitlab::Popen).to receive(:popen)
- .with(%w[make all git], nil, { "BUNDLE_GEMFILE" => nil, "RUBYOPT" => nil })
+ .with(%w[make clean-build all git], nil, { "BUNDLE_GEMFILE" => nil, "RUBYOPT" => nil })
.and_return(['output', 0])
subject
end
context 'when Rails.env is test' do
- let(:command) { %w[make all git] }
+ let(:command) { %w[make clean-build all git] }
before do
stub_rails_env('test')
diff --git a/spec/tasks/gitlab/password_rake_spec.rb b/spec/tasks/gitlab/password_rake_spec.rb
index 65bba836024..5d5e5af2536 100644
--- a/spec/tasks/gitlab/password_rake_spec.rb
+++ b/spec/tasks/gitlab/password_rake_spec.rb
@@ -3,7 +3,8 @@
require 'rake_helper'
RSpec.describe 'gitlab:password rake tasks', :silence_stdout do
- let_it_be(:user_1) { create(:user, username: 'foobar', password: 'initial_password') }
+ let_it_be(:user_1) { create(:user, username: 'foobar', password: User.random_password) }
+ let_it_be(:password) { User.random_password }
def stub_username(username)
allow(Gitlab::TaskHelpers).to receive(:prompt).with('Enter username: ').and_return(username)
@@ -19,14 +20,14 @@ RSpec.describe 'gitlab:password rake tasks', :silence_stdout do
Rake.application.rake_require 'tasks/gitlab/password'
stub_username('foobar')
- stub_password('secretpassword')
+ stub_password(password)
end
describe ':reset' do
context 'when all inputs are correct' do
it 'updates the password properly' do
run_rake_task('gitlab:password:reset', user_1.username)
- expect(user_1.reload.valid_password?('secretpassword')).to eq(true)
+ expect(user_1.reload.valid_password?(password)).to eq(true)
end
end
@@ -55,7 +56,7 @@ RSpec.describe 'gitlab:password rake tasks', :silence_stdout do
context 'when passwords do not match' do
before do
- stub_password('randompassword', 'differentpassword')
+ stub_password(password, User.random_password)
end
it 'aborts with an error' do
diff --git a/spec/tasks/gitlab/web_hook_rake_spec.rb b/spec/tasks/gitlab/web_hook_rake_spec.rb
index 2c582dc78f8..cb6a6e72ab1 100644
--- a/spec/tasks/gitlab/web_hook_rake_spec.rb
+++ b/spec/tasks/gitlab/web_hook_rake_spec.rb
@@ -50,6 +50,10 @@ RSpec.describe 'gitlab:web_hook namespace rake tasks', :silence_stdout do
let(:other_url) { 'http://other.example.com' }
+ it 'complains if URL is not provided' do
+ expect { run_rake_task('gitlab:web_hook:rm') }.to raise_error(ArgumentError, 'URL is required')
+ end
+
it 'removes a web hook from all projects by URL' do
stub_env('URL' => url)
run_rake_task('gitlab:web_hook:rm')
diff --git a/spec/tooling/danger/customer_success_spec.rb b/spec/tooling/danger/customer_success_spec.rb
new file mode 100644
index 00000000000..798905212f1
--- /dev/null
+++ b/spec/tooling/danger/customer_success_spec.rb
@@ -0,0 +1,91 @@
+# frozen_string_literal: true
+
+require 'rspec-parameterized'
+require 'gitlab-dangerfiles'
+require 'gitlab/dangerfiles/spec_helper'
+require_relative '../../../tooling/danger/customer_success'
+
+RSpec.describe Tooling::Danger::CustomerSuccess do
+ include_context "with dangerfile"
+
+ let(:fake_danger) { DangerSpecHelper.fake_danger.include(described_class) }
+ let(:customer_success) { fake_danger.new(helper: fake_helper) }
+
+ describe 'customer success danger' do
+ using RSpec::Parameterized::TableSyntax
+
+ where do
+ {
+ 'with data category changes to Ops and no Customer Success::Impact Check label' => {
+ modified_files: %w(config/metrics/20210216182127_user_secret_detection_jobs.yml app/models/user.rb),
+ changed_lines: ['-data_category: cat1', '+data_category: operational'],
+ customer_labeled: false,
+ impacted: true,
+ impacted_files: %w(config/metrics/20210216182127_user_secret_detection_jobs.yml)
+ },
+ 'with data category changes and Customer Success::Impact Check label' => {
+ modified_files: %w(config/metrics/20210216182127_user_secret_detection_jobs.yml),
+ changed_lines: ['-data_category: cat1', '+data_category: operational'],
+ customer_labeled: true,
+ impacted: false,
+ impacted_files: %w(config/metrics/20210216182127_user_secret_detection_jobs.yml)
+ },
+ 'with metric file changes and no data category changes' => {
+ modified_files: %w(config/metrics/20210216182127_user_secret_detection_jobs.yml),
+ changed_lines: ['-product_stage: growth'],
+ customer_labeled: false,
+ impacted: false,
+ impacted_files: []
+ },
+ 'with data category changes from Ops' => {
+ modified_files: %w(config/metrics/20210216182127_user_secret_detection_jobs.yml app/models/user.rb),
+ changed_lines: ['-data_category: operational', '+data_category: cat2'],
+ customer_labeled: false,
+ impacted: true,
+ impacted_files: %w(config/metrics/20210216182127_user_secret_detection_jobs.yml)
+ },
+ 'with data category removed' => {
+ modified_files: %w(config/metrics/20210216182127_user_secret_detection_jobs.yml app/models/user.rb),
+ changed_lines: ['-data_category: operational'],
+ customer_labeled: false,
+ impacted: true,
+ impacted_files: %w(config/metrics/20210216182127_user_secret_detection_jobs.yml)
+ },
+ 'with data category added' => {
+ modified_files: %w(config/metrics/20210216182127_user_secret_detection_jobs.yml app/models/user.rb),
+ changed_lines: ['+data_category: operational'],
+ customer_labeled: false,
+ impacted: true,
+ impacted_files: %w(config/metrics/20210216182127_user_secret_detection_jobs.yml)
+ },
+ 'with data category in uppercase' => {
+ modified_files: %w(config/metrics/20210216182127_user_secret_detection_jobs.yml app/models/user.rb),
+ changed_lines: ['+data_category: Operational'],
+ customer_labeled: false,
+ impacted: true,
+ impacted_files: %w(config/metrics/20210216182127_user_secret_detection_jobs.yml)
+ }
+ }
+ end
+
+ with_them do
+ before do
+ allow(fake_helper).to receive(:modified_files).and_return(modified_files)
+ allow(fake_helper).to receive(:changed_lines).and_return(changed_lines)
+ allow(fake_helper).to receive(:has_scoped_label_with_scope?).and_return(customer_labeled)
+ allow(fake_helper).to receive(:markdown_list).with(impacted_files)
+ .and_return(impacted_files.map { |item| "* `#{item}`" }.join("\n"))
+ end
+
+ it 'generates correct message' do
+ expect(customer_success.build_message).to match_expected_message
+ end
+ end
+ end
+
+ def match_expected_message
+ return be_nil unless impacted
+
+ start_with(described_class::CHANGED_SCHEMA_MESSAGE).and(include(*impacted_files))
+ end
+end
diff --git a/spec/tooling/graphql/docs/renderer_spec.rb b/spec/tooling/graphql/docs/renderer_spec.rb
index 18256fea2d6..bf2383507aa 100644
--- a/spec/tooling/graphql/docs/renderer_spec.rb
+++ b/spec/tooling/graphql/docs/renderer_spec.rb
@@ -347,6 +347,128 @@ RSpec.describe Tooling::Graphql::Docs::Renderer do
it_behaves_like 'renders correctly as GraphQL documentation'
end
+ context 'when an argument is in alpha' do
+ let(:type) do
+ Class.new(Types::BaseObject) do
+ graphql_name 'AlphaTest'
+ description 'A thing with arguments in alpha'
+
+ field :foo,
+ type: GraphQL::Types::String,
+ null: false,
+ description: 'A description.' do
+ argument :foo_arg, GraphQL::Types::String,
+ required: false,
+ description: 'Argument description.',
+ alpha: { milestone: '101.2' }
+ end
+ end
+ end
+
+ let(:section) do
+ <<~DOC
+ ##### `AlphaTest.foo`
+
+ A description.
+
+ Returns [`String!`](#string).
+
+ ###### Arguments
+
+ | Name | Type | Description |
+ | ---- | ---- | ----------- |
+ | <a id="alphatestfoofooarg"></a>`fooArg` **{warning-solid}** | [`String`](#string) | **Introduced** in 101.2. This feature is in Alpha. It can be changed or removed at any time. Argument description. |
+ DOC
+ end
+
+ it_behaves_like 'renders correctly as GraphQL documentation'
+ end
+
+ context 'when a field is in alpha' do
+ let(:type) do
+ Class.new(Types::BaseObject) do
+ graphql_name 'AlphaTest'
+ description 'A thing with fields in alpha'
+
+ field :foo,
+ type: GraphQL::Types::String,
+ null: false,
+ alpha: { milestone: '1.10' },
+ description: 'A description.'
+ field :foo_with_args,
+ type: GraphQL::Types::String,
+ null: false,
+ alpha: { milestone: '1.10' },
+ description: 'A description.' do
+ argument :arg, GraphQL::Types::Int, required: false, description: 'Argity'
+ end
+ end
+ end
+
+ let(:section) do
+ <<~DOC
+ ### `AlphaTest`
+
+ A thing with fields in alpha.
+
+ #### Fields
+
+ | Name | Type | Description |
+ | ---- | ---- | ----------- |
+ | <a id="alphatestfoo"></a>`foo` **{warning-solid}** | [`String!`](#string) | **Introduced** in 1.10. This feature is in Alpha. It can be changed or removed at any time. A description. |
+
+ #### Fields with arguments
+
+ ##### `AlphaTest.fooWithArgs`
+
+ A description.
+
+ WARNING:
+ **Introduced** in 1.10.
+ This feature is in Alpha. It can be changed or removed at any time.
+
+ Returns [`String!`](#string).
+
+ ###### Arguments
+
+ | Name | Type | Description |
+ | ---- | ---- | ----------- |
+ | <a id="alphatestfoowithargsarg"></a>`arg` | [`Int`](#int) | Argity. |
+ DOC
+ end
+
+ it_behaves_like 'renders correctly as GraphQL documentation'
+ end
+
+ context 'when a Query.field is in alpha' do
+ before do
+ query_type.field(
+ name: :bar,
+ type: type,
+ null: true,
+ description: 'A bar',
+ alpha: { milestone: '10.11' }
+ )
+ end
+
+ let(:type) { ::GraphQL::Types::Int }
+ let(:section) do
+ <<~DOC
+ ### `Query.bar`
+
+ A bar.
+
+ WARNING:
+ **Introduced** in 10.11.
+ This feature is in Alpha. It can be changed or removed at any time.
+
+ Returns [`Int`](#int).
+ DOC
+ end
+
+ it_behaves_like 'renders correctly as GraphQL documentation'
+ end
+
context 'when a field has an Enumeration type' do
let(:type) do
enum_type = Class.new(Types::BaseEnum) do
diff --git a/spec/tooling/lib/tooling/find_codeowners_spec.rb b/spec/tooling/lib/tooling/find_codeowners_spec.rb
index 10c2a076847..5f6f83ab2c7 100644
--- a/spec/tooling/lib/tooling/find_codeowners_spec.rb
+++ b/spec/tooling/lib/tooling/find_codeowners_spec.rb
@@ -11,6 +11,7 @@ RSpec.describe Tooling::FindCodeowners do
allow(subject).to receive(:load_config).and_return(
'[Section name]': {
'@group': {
+ entries: %w[whatever entries],
allow: {
keywords: %w[dir0 file],
patterns: ['/%{keyword}/**/*', '/%{keyword}']
@@ -31,8 +32,11 @@ RSpec.describe Tooling::FindCodeowners do
end
end.to output(<<~CODEOWNERS).to_stdout
[Section name]
+ whatever @group
+ entries @group
/dir0/dir1/ @group
/file @group
+
CODEOWNERS
end
end
@@ -57,21 +61,33 @@ RSpec.describe Tooling::FindCodeowners do
patterns: ['%{keyword}']
}
}
+ },
+ '[Compliance]': {
+ '@gitlab-org/manage/compliance': {
+ entries: %w[
+ /ee/app/services/audit_events/build_service.rb
+ ],
+ allow: {
+ patterns: %w[
+ /ee/app/services/audit_events/*
+ ]
+ }
+ }
}
}
)
end
it 'expands the allow and deny list with keywords and patterns' do
- subject.load_definitions.each do |section, group_defintions|
- group_defintions.each do |group, definitions|
- expect(definitions[:allow]).to be_an(Array)
- expect(definitions[:deny]).to be_an(Array)
- end
+ group_definitions = subject.load_definitions[:'[Authentication and Authorization]']
+
+ group_definitions.each do |group, definitions|
+ expect(definitions[:allow]).to be_an(Array)
+ expect(definitions[:deny]).to be_an(Array)
end
end
- it 'expands the auth group' do
+ it 'expands the patterns for the auth group' do
auth = subject.load_definitions.dig(
:'[Authentication and Authorization]',
:'@gitlab-org/manage/authentication-and-authorization')
@@ -95,6 +111,21 @@ RSpec.describe Tooling::FindCodeowners do
]
)
end
+
+ it 'retains the array and expands the patterns for the compliance group' do
+ compliance = subject.load_definitions.dig(
+ :'[Compliance]',
+ :'@gitlab-org/manage/compliance')
+
+ expect(compliance).to eq(
+ entries: %w[
+ /ee/app/services/audit_events/build_service.rb
+ ],
+ allow: %w[
+ /ee/app/services/audit_events/*
+ ]
+ )
+ end
end
describe '#load_config' do
diff --git a/spec/tooling/quality/test_level_spec.rb b/spec/tooling/quality/test_level_spec.rb
index 10afcb18a73..f4eea28b66f 100644
--- a/spec/tooling/quality/test_level_spec.rb
+++ b/spec/tooling/quality/test_level_spec.rb
@@ -46,7 +46,7 @@ RSpec.describe Quality::TestLevel do
context 'when level is unit' do
it 'returns a pattern' do
expect(subject.pattern(:unit))
- .to eq("spec/{bin,channels,config,db,dependencies,elastic,elastic_integration,experiments,events,factories,finders,frontend,graphql,haml_lint,helpers,initializers,lib,metrics_server,models,policies,presenters,rack_servers,replicators,routing,rubocop,scripts,serializers,services,sidekiq,sidekiq_cluster,spam,support_specs,tasks,uploaders,validators,views,workers,tooling,components}{,/**/}*_spec.rb")
+ .to eq("spec/{bin,channels,config,db,dependencies,elastic,elastic_integration,experiments,factories,finders,frontend,graphql,haml_lint,helpers,initializers,lib,metrics_server,models,policies,presenters,rack_servers,replicators,routing,rubocop,scripts,serializers,services,sidekiq,sidekiq_cluster,spam,support_specs,tasks,uploaders,validators,views,workers,tooling,components}{,/**/}*_spec.rb")
end
end
@@ -121,7 +121,7 @@ RSpec.describe Quality::TestLevel do
context 'when level is unit' do
it 'returns a regexp' do
expect(subject.regexp(:unit))
- .to eq(%r{spec/(bin|channels|config|db|dependencies|elastic|elastic_integration|experiments|events|factories|finders|frontend|graphql|haml_lint|helpers|initializers|lib|metrics_server|models|policies|presenters|rack_servers|replicators|routing|rubocop|scripts|serializers|services|sidekiq|sidekiq_cluster|spam|support_specs|tasks|uploaders|validators|views|workers|tooling|components)/})
+ .to eq(%r{spec/(bin|channels|config|db|dependencies|elastic|elastic_integration|experiments|factories|finders|frontend|graphql|haml_lint|helpers|initializers|lib|metrics_server|models|policies|presenters|rack_servers|replicators|routing|rubocop|scripts|serializers|services|sidekiq|sidekiq_cluster|spam|support_specs|tasks|uploaders|validators|views|workers|tooling|components)/})
end
end
diff --git a/spec/uploaders/avatar_uploader_spec.rb b/spec/uploaders/avatar_uploader_spec.rb
index 1fadd9425ef..a55e5c23fe8 100644
--- a/spec/uploaders/avatar_uploader_spec.rb
+++ b/spec/uploaders/avatar_uploader_spec.rb
@@ -52,7 +52,7 @@ RSpec.describe AvatarUploader do
# in a stub below so we can set any path.
let_it_be(:path) { File.join('spec', 'fixtures', 'video_sample.mp4') }
- where(:mime_type) { described_class::MIME_WHITELIST }
+ where(:mime_type) { described_class::MIME_ALLOWLIST }
with_them do
include_context 'force content type detection to mime_type'
diff --git a/spec/uploaders/design_management/design_v432x230_uploader_spec.rb b/spec/uploaders/design_management/design_v432x230_uploader_spec.rb
index b3a106ef94b..a18a37e73da 100644
--- a/spec/uploaders/design_management/design_v432x230_uploader_spec.rb
+++ b/spec/uploaders/design_management/design_v432x230_uploader_spec.rb
@@ -63,7 +63,7 @@ RSpec.describe DesignManagement::DesignV432x230Uploader do
# in a stub below so we can set any path.
let_it_be(:path) { File.join('spec', 'fixtures', 'dk.png') }
- where(:mime_type) { described_class::MIME_TYPE_WHITELIST }
+ where(:mime_type) { described_class::MIME_TYPE_ALLOWLIST }
with_them do
include_context 'force content type detection to mime_type'
diff --git a/spec/uploaders/favicon_uploader_spec.rb b/spec/uploaders/favicon_uploader_spec.rb
index 6bff3ff8a14..7f452075293 100644
--- a/spec/uploaders/favicon_uploader_spec.rb
+++ b/spec/uploaders/favicon_uploader_spec.rb
@@ -7,13 +7,13 @@ RSpec.describe FaviconUploader do
let_it_be(:uploader) { described_class.new(model, :favicon) }
context 'accept whitelist file content type' do
- include_context 'ignore extension whitelist check'
+ include_context 'ignore extension allowlist check'
# We need to feed through a valid path, but we force the parsed mime type
# in a stub below so we can set any path.
let_it_be(:path) { File.join('spec', 'fixtures', 'video_sample.mp4') }
- where(:mime_type) { described_class::MIME_WHITELIST }
+ where(:mime_type) { described_class::MIME_ALLOWLIST }
with_them do
include_context 'force content type detection to mime_type'
@@ -23,7 +23,7 @@ RSpec.describe FaviconUploader do
end
context 'upload non-whitelisted file content type' do
- include_context 'ignore extension whitelist check'
+ include_context 'ignore extension allowlist check'
let_it_be(:path) { File.join('spec', 'fixtures', 'sanitized.svg') }
@@ -31,7 +31,7 @@ RSpec.describe FaviconUploader do
end
context 'upload misnamed non-whitelisted file content type' do
- include_context 'ignore extension whitelist check'
+ include_context 'ignore extension allowlist check'
let_it_be(:path) { File.join('spec', 'fixtures', 'not_a_png.png') }
diff --git a/spec/uploaders/object_storage_spec.rb b/spec/uploaders/object_storage_spec.rb
index 1bcc43b81a8..a4f6116f7d7 100644
--- a/spec/uploaders/object_storage_spec.rb
+++ b/spec/uploaders/object_storage_spec.rb
@@ -256,8 +256,22 @@ RSpec.describe ObjectStorage do
describe '#use_open_file' do
context 'when file is stored locally' do
- it "returns the file" do
- expect { |b| uploader.use_open_file(&b) }.to yield_with_args(an_instance_of(ObjectStorage::Concern::OpenFile))
+ it "returns the file unlinked" do
+ expect { |b| uploader.use_open_file(&b) }.to yield_with_args(
+ satisfying do |f|
+ expect(f).to be_an_instance_of(ObjectStorage::Concern::OpenFile)
+ expect(f.file_path).to be_nil
+ end
+ )
+ end
+
+ it "returns the file not unlinked" do
+ expect { |b| uploader.use_open_file(unlink_early: false, &b) }.to yield_with_args(
+ satisfying do |f|
+ expect(f).to be_an_instance_of(ObjectStorage::Concern::OpenFile)
+ expect(File.exist?(f.file_path)).to be_truthy
+ end
+ )
end
end
diff --git a/spec/views/admin/identities/index.html.haml_spec.rb b/spec/views/admin/identities/index.html.haml_spec.rb
new file mode 100644
index 00000000000..3e8def003ae
--- /dev/null
+++ b/spec/views/admin/identities/index.html.haml_spec.rb
@@ -0,0 +1,70 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'admin/identities/index.html.haml', :aggregate_failures do
+ include Admin::IdentitiesHelper
+
+ let_it_be(:ldap_user) { create(:omniauth_user, provider: 'ldapmain', extern_uid: 'ldap-uid') }
+
+ before do
+ assign(:user, ldap_user)
+ view.lookup_context.prefixes = ['admin/identities']
+ end
+
+ context 'without identities' do
+ before do
+ assign(:identities, [])
+ end
+
+ it 'shows table headers' do
+ render
+
+ expect(rendered).to include('<th class="gl-border-t-0!">').exactly(5)
+ expect(rendered).to include(_('Provider'))
+ expect(rendered).to include(s_('Identity|Provider ID'))
+ expect(rendered).to include(_('Group'))
+ expect(rendered).to include(_('Identifier'))
+ expect(rendered).to include(_('Actions'))
+ end
+
+ it 'shows information text' do
+ render
+
+ expect(rendered).to include('<td colspan="5">').exactly(1)
+ expect(rendered).to include(_('This user has no identities'))
+ end
+ end
+
+ context 'with LDAP identities' do
+ before do
+ assign(:identities, ldap_user.identities)
+ end
+
+ it 'shows exactly 5 columns' do
+ render
+
+ expect(rendered).to include('</td>').exactly(5)
+ end
+
+ it 'shows identity without provider ID or group' do
+ render
+
+ # Provider
+ expect(rendered).to include('ldap (ldapmain)')
+ # Provider ID
+ expect(rendered).to include('data-testid="provider_id_blank"')
+ # Group
+ expect(rendered).to include('data-testid="saml_group_blank"')
+ # Identifier
+ expect(rendered).to include('ldap-uid')
+ end
+
+ it 'shows edit and delete identity buttons' do
+ render
+
+ expect(rendered).to include("aria-label=\"#{_('Edit')}\"")
+ expect(rendered).to include("aria-label=\"#{_('Delete identity')}\"")
+ end
+ end
+end
diff --git a/spec/views/devise/sessions/new.html.haml_spec.rb b/spec/views/devise/sessions/new.html.haml_spec.rb
index b3cd1493149..c8e9aa15287 100644
--- a/spec/views/devise/sessions/new.html.haml_spec.rb
+++ b/spec/views/devise/sessions/new.html.haml_spec.rb
@@ -9,6 +9,7 @@ RSpec.describe 'devise/sessions/new' do
before do
stub_devise
disable_captcha
+ stub_feature_flags(restyle_login_page: false)
allow(Gitlab).to receive(:com?).and_return(true)
end
@@ -29,67 +30,74 @@ RSpec.describe 'devise/sessions/new' do
end
end
- describe 'ldap' do
- include LdapHelpers
-
- let(:server) { { provider_name: 'ldapmain', label: 'LDAP' }.with_indifferent_access }
-
+ flag_values = [true, false]
+ flag_values.each do |val|
before do
- enable_ldap
- stub_devise
- disable_captcha
- disable_sign_up
- disable_other_signin_methods
-
- allow(view).to receive(:experiment_enabled?).and_return(false)
+ stub_feature_flags(restyle_login_page: val)
end
- it 'is shown when enabled' do
- render
+ describe 'ldap' do
+ include LdapHelpers
- expect(rendered).to have_selector('.new-session-tabs')
- expect(rendered).to have_selector('[data-qa-selector="ldap_tab"]') # rubocop:disable QA/SelectorUsage
- expect(rendered).to have_field('LDAP Username')
- end
+ let(:server) { { provider_name: 'ldapmain', label: 'LDAP' }.with_indifferent_access }
- it 'is not shown when LDAP sign in is disabled' do
- disable_ldap_sign_in
+ before do
+ enable_ldap
+ stub_devise
+ disable_captcha
+ disable_sign_up
+ disable_other_signin_methods
- render
+ allow(view).to receive(:experiment_enabled?).and_return(false)
+ end
- expect(rendered).to have_content('No authentication methods configured')
- expect(rendered).not_to have_selector('[data-qa-selector="ldap_tab"]') # rubocop:disable QA/SelectorUsage
- expect(rendered).not_to have_field('LDAP Username')
- end
- end
+ it 'is shown when enabled' do
+ render
- describe 'Google Tag Manager' do
- let!(:gtm_id) { 'GTM-WWKMTWS'}
+ expect(rendered).to have_selector('.new-session-tabs')
+ expect(rendered).to have_selector('[data-qa-selector="ldap_tab"]') # rubocop:disable QA/SelectorUsage
+ expect(rendered).to have_field('LDAP Username')
+ end
- subject { rendered }
+ it 'is not shown when LDAP sign in is disabled' do
+ disable_ldap_sign_in
- before do
- stub_devise
- disable_captcha
- stub_config(extra: { google_tag_manager_id: gtm_id, google_tag_manager_nonce_id: gtm_id })
+ render
+
+ expect(rendered).to have_content('No authentication methods configured')
+ expect(rendered).not_to have_selector('[data-qa-selector="ldap_tab"]') # rubocop:disable QA/SelectorUsage
+ expect(rendered).not_to have_field('LDAP Username')
+ end
end
- describe 'when Google Tag Manager is enabled' do
+ describe 'Google Tag Manager' do
+ let!(:gtm_id) { 'GTM-WWKMTWS' }
+
+ subject { rendered }
+
before do
- enable_gtm
- render
+ stub_devise
+ disable_captcha
+ stub_config(extra: { google_tag_manager_id: gtm_id, google_tag_manager_nonce_id: gtm_id })
end
- it { is_expected.to match /www.googletagmanager.com/ }
- end
+ describe 'when Google Tag Manager is enabled' do
+ before do
+ enable_gtm
+ render
+ end
- describe 'when Google Tag Manager is disabled' do
- before do
- disable_gtm
- render
+ it { is_expected.to match /www.googletagmanager.com/ }
end
- it { is_expected.not_to match /www.googletagmanager.com/ }
+ describe 'when Google Tag Manager is disabled' do
+ before do
+ disable_gtm
+ render
+ end
+
+ it { is_expected.not_to match /www.googletagmanager.com/ }
+ end
end
end
diff --git a/spec/views/groups/group_members/index.html.haml_spec.rb b/spec/views/groups/group_members/index.html.haml_spec.rb
index 2d7d50555d6..c7aebb94a45 100644
--- a/spec/views/groups/group_members/index.html.haml_spec.rb
+++ b/spec/views/groups/group_members/index.html.haml_spec.rb
@@ -21,7 +21,7 @@ RSpec.describe 'groups/group_members/index', :aggregate_failures do
render
expect(rendered).to have_content('Group members')
- expect(rendered).to have_content('You can invite a new member')
+ expect(rendered).to have_content("You're viewing members")
expect(rendered).to have_selector('.js-invite-group-trigger')
expect(rendered).to have_selector('.js-invite-members-trigger')
diff --git a/spec/views/layouts/header/_new_dropdown.haml_spec.rb b/spec/views/layouts/header/_new_dropdown.haml_spec.rb
index 208da345e7f..79c22871b44 100644
--- a/spec/views/layouts/header/_new_dropdown.haml_spec.rb
+++ b/spec/views/layouts/header/_new_dropdown.haml_spec.rb
@@ -40,7 +40,10 @@ RSpec.describe 'layouts/header/_new_dropdown' do
it 'has a "New subgroup" link' do
render
- expect(rendered).to have_link('New subgroup', href: new_group_path(parent_id: group.id))
+ expect(rendered).to have_link(
+ 'New subgroup',
+ href: new_group_path(parent_id: group.id, anchor: 'create-group-pane')
+ )
end
end
diff --git a/spec/views/projects/blob/_viewer.html.haml_spec.rb b/spec/views/projects/blob/_viewer.html.haml_spec.rb
index 893cfec1491..2761d10f9ad 100644
--- a/spec/views/projects/blob/_viewer.html.haml_spec.rb
+++ b/spec/views/projects/blob/_viewer.html.haml_spec.rb
@@ -24,6 +24,7 @@ RSpec.describe 'projects/blob/_viewer.html.haml' do
before do
assign(:project, project)
assign(:blob, blob)
+ assign(:ref, 'master')
assign(:id, File.join('master', blob.path))
controller.params[:controller] = 'projects/blob'
diff --git a/spec/views/projects/pages/new.html.haml_spec.rb b/spec/views/projects/pages/new.html.haml_spec.rb
new file mode 100644
index 00000000000..919b2fe84ee
--- /dev/null
+++ b/spec/views/projects/pages/new.html.haml_spec.rb
@@ -0,0 +1,42 @@
+# frozen_string_literal: true
+require 'spec_helper'
+
+RSpec.describe 'projects/pages/new' do
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:user) { create(:user) }
+
+ before do
+ allow(project).to receive(:show_pages_onboarding?).and_return(true)
+ project.add_maintainer(user)
+
+ assign(:project, project)
+ allow(view).to receive(:current_user).and_return(user)
+ end
+
+ describe 'with onboarding wizard feature enabled' do
+ before do
+ Feature.enable(:use_pipeline_wizard_for_pages)
+ end
+
+ it "shows the onboarding wizard" do
+ render
+ expect(rendered).to have_selector('#js-pages')
+ end
+ end
+
+ describe 'with onboarding wizard feature disabled' do
+ before do
+ Feature.disable(:use_pipeline_wizard_for_pages)
+ end
+
+ it "does not show the onboarding wizard" do
+ render
+ expect(rendered).not_to have_selector('#js-pages')
+ end
+
+ it "renders the usage instructions" do
+ render
+ expect(rendered).to render_template('projects/pages/_use')
+ end
+ end
+end
diff --git a/spec/views/projects/pipeline_schedules/_pipeline_schedule.html.haml_spec.rb b/spec/views/projects/pipeline_schedules/_pipeline_schedule.html.haml_spec.rb
index e650e183bc8..37c9908af1d 100644
--- a/spec/views/projects/pipeline_schedules/_pipeline_schedule.html.haml_spec.rb
+++ b/spec/views/projects/pipeline_schedules/_pipeline_schedule.html.haml_spec.rb
@@ -28,7 +28,7 @@ RSpec.describe 'projects/pipeline_schedules/_pipeline_schedule' do
it 'non-owner can take ownership of pipeline' do
render
- expect(rendered).to have_link('Take ownership')
+ expect(rendered).to have_button('Take ownership')
end
end
@@ -42,7 +42,7 @@ RSpec.describe 'projects/pipeline_schedules/_pipeline_schedule' do
it 'owner cannot take ownership of pipeline' do
render
- expect(rendered).not_to have_link('Take ownership')
+ expect(rendered).not_to have_button('Take ownership')
end
end
end
diff --git a/spec/workers/archive_trace_worker_spec.rb b/spec/workers/archive_trace_worker_spec.rb
deleted file mode 100644
index a9f256b1b3b..00000000000
--- a/spec/workers/archive_trace_worker_spec.rb
+++ /dev/null
@@ -1,31 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe ArchiveTraceWorker do
- describe '#perform' do
- subject { described_class.new.perform(job&.id) }
-
- context 'when job is found' do
- let(:job) { create(:ci_build, :trace_live) }
-
- it 'executes service' do
- expect_any_instance_of(Ci::ArchiveTraceService)
- .to receive(:execute).with(job, anything)
-
- subject
- end
- end
-
- context 'when job is not found' do
- let(:job) { nil }
-
- it 'does not execute service' do
- expect_any_instance_of(Ci::ArchiveTraceService)
- .not_to receive(:execute)
-
- subject
- end
- end
- end
-end
diff --git a/spec/workers/build_finished_worker_spec.rb b/spec/workers/build_finished_worker_spec.rb
deleted file mode 100644
index 2ff173c1558..00000000000
--- a/spec/workers/build_finished_worker_spec.rb
+++ /dev/null
@@ -1,88 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe BuildFinishedWorker do
- let(:worker) { described_class.new }
-
- subject { worker.perform(build.id) }
-
- describe '#perform' do
- context 'when build exists' do
- let_it_be(:build) { create(:ci_build, :success, pipeline: create(:ci_pipeline)) }
-
- before do
- expect(Ci::Build).to receive(:find_by).with({ id: build.id }).and_return(build)
- end
-
- it 'calculates coverage and calls hooks', :aggregate_failures do
- expect(build).to receive(:update_coverage).ordered
-
- expect_next_instance_of(Ci::BuildReportResultService) do |build_report_result_service|
- expect(build_report_result_service).to receive(:execute).with(build)
- end
-
- expect(BuildHooksWorker).to receive(:perform_async)
- expect(ChatNotificationWorker).not_to receive(:perform_async)
- expect(Ci::ArchiveTraceWorker).to receive(:perform_in)
-
- subject
- end
-
- context 'when build is failed' do
- before do
- build.update!(status: :failed)
- end
-
- it 'adds a todo' do
- expect(::Ci::MergeRequests::AddTodoWhenBuildFailsWorker).to receive(:perform_async)
-
- subject
- end
- end
-
- context 'when build has a chat' do
- before do
- build.pipeline.update!(source: :chat)
- end
-
- it 'schedules a ChatNotification job' do
- expect(ChatNotificationWorker).to receive(:perform_async).with(build.id)
-
- subject
- end
- end
-
- context 'when project is deleted' do
- before do
- allow(build).to receive(:project).and_return(nil)
- end
-
- it 'does no processing' do
- expect(worker).not_to receive(:process_build)
-
- subject
- end
- end
-
- context 'when project is pending_delete' do
- before do
- build.project.update_attribute(:pending_delete, true)
- end
-
- it 'does no processing' do
- expect(worker).not_to receive(:process_build)
-
- subject
- end
- end
- end
-
- context 'when build does not exist' do
- it 'does not raise exception' do
- expect { described_class.new.perform(non_existing_record_id) }
- .not_to raise_error
- end
- end
- end
-end
diff --git a/spec/workers/build_hooks_worker_spec.rb b/spec/workers/build_hooks_worker_spec.rb
index 426eb03638c..80dc36d268f 100644
--- a/spec/workers/build_hooks_worker_spec.rb
+++ b/spec/workers/build_hooks_worker_spec.rb
@@ -23,8 +23,8 @@ RSpec.describe BuildHooksWorker do
end
end
- describe '.perform_async' do
- it 'sends a message to the application logger, before performing', :sidekiq_inline do
+ describe '.perform_async', :sidekiq_inline do
+ it 'sends a message to the application logger, before performing' do
build = create(:ci_build)
expect(Gitlab::AppLogger).to receive(:info).with(
diff --git a/spec/workers/ci/build_finished_worker_spec.rb b/spec/workers/ci/build_finished_worker_spec.rb
index 201182636e7..5ddaabc3938 100644
--- a/spec/workers/ci/build_finished_worker_spec.rb
+++ b/spec/workers/ci/build_finished_worker_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
RSpec.describe Ci::BuildFinishedWorker do
+ include AfterNextHelpers
+
subject { described_class.new.perform(build.id) }
describe '#perform' do
@@ -16,17 +18,28 @@ RSpec.describe Ci::BuildFinishedWorker do
it 'calculates coverage and calls hooks', :aggregate_failures do
expect(build).to receive(:update_coverage).ordered
- expect_next_instance_of(Ci::BuildReportResultService) do |build_report_result_service|
- expect(build_report_result_service).to receive(:execute).with(build)
- end
+ expect_next(Ci::BuildReportResultService).to receive(:execute).with(build)
- expect(BuildHooksWorker).to receive(:perform_async)
+ expect(build).to receive(:execute_hooks)
expect(ChatNotificationWorker).not_to receive(:perform_async)
expect(Ci::ArchiveTraceWorker).to receive(:perform_in)
subject
end
+ context 'when the execute_build_hooks_inline feature flag is disabled' do
+ before do
+ stub_feature_flags(execute_build_hooks_inline: false)
+ end
+
+ it 'uses the BuildHooksWorker' do
+ expect(build).not_to receive(:execute_hooks)
+ expect(BuildHooksWorker).to receive(:perform_async).with(build)
+
+ subject
+ end
+ end
+
context 'when build is failed' do
before do
build.update!(status: :failed)
diff --git a/spec/workers/ci/cancel_pipeline_worker_spec.rb b/spec/workers/ci/cancel_pipeline_worker_spec.rb
new file mode 100644
index 00000000000..6165aaff1c7
--- /dev/null
+++ b/spec/workers/ci/cancel_pipeline_worker_spec.rb
@@ -0,0 +1,50 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ci::CancelPipelineWorker, :aggregate_failures do
+ let!(:pipeline) { create(:ci_pipeline, :running) }
+
+ describe '#perform' do
+ subject(:perform) { described_class.new.perform(pipeline.id, pipeline.id) }
+
+ it 'calls cancel_running' do
+ allow(::Ci::Pipeline).to receive(:find_by_id).and_return(pipeline)
+ expect(pipeline).to receive(:cancel_running).with(
+ auto_canceled_by_pipeline_id: pipeline.id,
+ cascade_to_children: false
+ )
+
+ perform
+ end
+
+ context 'if pipeline is deleted' do
+ subject(:perform) { described_class.new.perform(non_existing_record_id, non_existing_record_id) }
+
+ it 'does not error' do
+ expect(pipeline).not_to receive(:cancel_running)
+
+ perform
+ end
+ end
+
+ describe 'with builds and state transition side effects', :sidekiq_inline do
+ let!(:build) { create(:ci_build, :running, pipeline: pipeline) }
+
+ it_behaves_like 'an idempotent worker', :sidekiq_inline do
+ let(:job_args) { [pipeline.id, pipeline.id] }
+
+ it 'cancels the pipeline' do
+ perform
+
+ pipeline.reload
+
+ expect(pipeline).to be_canceled
+ expect(pipeline.builds.first).to be_canceled
+ expect(pipeline.builds.first.auto_canceled_by_id).to eq pipeline.id
+ expect(pipeline.auto_canceled_by_id).to eq pipeline.id
+ end
+ end
+ end
+ end
+end
diff --git a/spec/workers/ci/runners/process_runner_version_update_worker_spec.rb b/spec/workers/ci/runners/process_runner_version_update_worker_spec.rb
new file mode 100644
index 00000000000..ff67266c3e8
--- /dev/null
+++ b/spec/workers/ci/runners/process_runner_version_update_worker_spec.rb
@@ -0,0 +1,48 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ci::Runners::ProcessRunnerVersionUpdateWorker do
+ subject(:worker) { described_class.new }
+
+ describe '#perform' do
+ let(:version) { '1.0.0' }
+ let(:job_args) { version }
+
+ include_examples 'an idempotent worker' do
+ subject(:perform_twice) { perform_multiple(job_args, worker: worker, exec_times: 2) }
+
+ let(:service) { ::Ci::Runners::ProcessRunnerVersionUpdateService.new(version) }
+ let(:available_runner_releases) do
+ %w[1.0.0 1.0.1]
+ end
+
+ before do
+ allow(Ci::Runners::ProcessRunnerVersionUpdateService).to receive(:new).and_return(service)
+ allow(service).to receive(:execute).and_call_original
+
+ url = ::Gitlab::CurrentSettings.current_application_settings.public_runner_releases_url
+
+ WebMock.stub_request(:get, url).to_return(
+ body: available_runner_releases.map { |v| { name: v } }.to_json,
+ status: 200,
+ headers: { 'Content-Type' => 'application/json' }
+ )
+ end
+
+ it 'logs the service result', :aggregate_failures do
+ perform_twice
+
+ expect(Ci::Runners::ProcessRunnerVersionUpdateService).to have_received(:new).twice
+ expect(service).to have_received(:execute).twice
+ expect(worker.logging_extras).to eq(
+ {
+ 'extra.ci_runners_process_runner_version_update_worker.status' => :success,
+ 'extra.ci_runners_process_runner_version_update_worker.message' => nil,
+ 'extra.ci_runners_process_runner_version_update_worker.upgrade_status' => 'recommended'
+ }
+ )
+ end
+ end
+ end
+end
diff --git a/spec/workers/ci/runners/reconcile_existing_runner_versions_cron_worker_spec.rb b/spec/workers/ci/runners/reconcile_existing_runner_versions_cron_worker_spec.rb
new file mode 100644
index 00000000000..1292df62ce5
--- /dev/null
+++ b/spec/workers/ci/runners/reconcile_existing_runner_versions_cron_worker_spec.rb
@@ -0,0 +1,45 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ci::Runners::ReconcileExistingRunnerVersionsCronWorker do
+ subject(:worker) { described_class.new }
+
+ describe '#perform' do
+ context 'when scheduled by cronjob' do
+ it 'reschedules itself' do
+ expect(described_class).to(receive(:perform_in).with(a_value_between(0, 12.hours.in_seconds), false))
+ expect(::Ci::Runners::ReconcileExistingRunnerVersionsService).not_to receive(:new)
+
+ worker.perform
+ end
+ end
+
+ context 'when self-scheduled' do
+ include_examples 'an idempotent worker' do
+ subject(:perform) { perform_multiple(false, worker: worker) }
+
+ it 'executes the service' do
+ expect_next_instance_of(Ci::Runners::ReconcileExistingRunnerVersionsService) do |service|
+ expect(service).to receive(:execute).and_return(ServiceResponse.success)
+ end.exactly(worker_exec_times)
+
+ perform
+ end
+ end
+
+ it 'logs the service result' do
+ expect_next_instance_of(Ci::Runners::ReconcileExistingRunnerVersionsService) do |service|
+ expect(service).to receive(:execute)
+ .and_return(ServiceResponse.success(payload: { some_job_result_key: 'some_value' }))
+ end
+
+ worker.perform(false)
+
+ expect(worker.logging_extras).to eq({
+ 'extra.ci_runners_reconcile_existing_runner_versions_cron_worker.some_job_result_key' => 'some_value'
+ })
+ end
+ end
+ end
+end
diff --git a/spec/workers/ci/track_failed_build_worker_spec.rb b/spec/workers/ci/track_failed_build_worker_spec.rb
new file mode 100644
index 00000000000..12d0e64afc5
--- /dev/null
+++ b/spec/workers/ci/track_failed_build_worker_spec.rb
@@ -0,0 +1,28 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ::Ci::TrackFailedBuildWorker do
+ let_it_be(:build) { create(:ci_build, :failed, :sast_report) }
+ let_it_be(:exit_code) { 42 }
+ let_it_be(:failure_reason) { "script_failure" }
+
+ subject { described_class.new.perform(build.id, exit_code, failure_reason) }
+
+ describe '#perform' do
+ context 'when a build has failed' do
+ it 'executes track service' do
+ expect(Ci::TrackFailedBuildService)
+ .to receive(:new)
+ .with(build: build, exit_code: exit_code, failure_reason: failure_reason)
+ .and_call_original
+
+ subject
+ end
+ end
+
+ it_behaves_like 'an idempotent worker' do
+ let(:job_args) { [build.id, exit_code, failure_reason] }
+ end
+ end
+end
diff --git a/spec/workers/concerns/limited_capacity/job_tracker_spec.rb b/spec/workers/concerns/limited_capacity/job_tracker_spec.rb
index eeccdbd0e2d..0e3fa350fcd 100644
--- a/spec/workers/concerns/limited_capacity/job_tracker_spec.rb
+++ b/spec/workers/concerns/limited_capacity/job_tracker_spec.rb
@@ -11,7 +11,7 @@ RSpec.describe LimitedCapacity::JobTracker, :clean_gitlab_redis_shared_state do
describe '#register' do
it 'adds jid to the set' do
- expect(job_tracker.register('a-job-id', max_jids)). to be true
+ expect(job_tracker.register('a-job-id', max_jids)).to be true
expect(job_tracker.running_jids).to contain_exactly('a-job-id')
end
diff --git a/spec/workers/concerns/waitable_worker_spec.rb b/spec/workers/concerns/waitable_worker_spec.rb
index f6d4cc4679d..bf156c3b8cb 100644
--- a/spec/workers/concerns/waitable_worker_spec.rb
+++ b/spec/workers/concerns/waitable_worker_spec.rb
@@ -30,19 +30,33 @@ RSpec.describe WaitableWorker do
describe '.bulk_perform_and_wait' do
context '1 job' do
- it 'inlines the job' do
- args_list = [[1]]
- expect(worker).to receive(:bulk_perform_inline).with(args_list).and_call_original
- expect(Gitlab::AppJsonLogger).to(
- receive(:info).with(a_hash_including('message' => 'running inline',
- 'class' => 'Gitlab::Foo::Bar::DummyWorker',
- 'job_status' => 'running',
- 'queue' => 'foo_bar_dummy'))
- .once)
-
- worker.bulk_perform_and_wait(args_list)
-
- expect(worker.counter).to eq(1)
+ it 'runs the jobs asynchronously' do
+ arguments = [[1]]
+
+ expect(worker).to receive(:bulk_perform_async).with(arguments)
+
+ worker.bulk_perform_and_wait(arguments)
+ end
+
+ context 'when the feature flag `always_async_project_authorizations_refresh` is turned off' do
+ before do
+ stub_feature_flags(always_async_project_authorizations_refresh: false)
+ end
+
+ it 'inlines the job' do
+ args_list = [[1]]
+ expect(worker).to receive(:bulk_perform_inline).with(args_list).and_call_original
+ expect(Gitlab::AppJsonLogger).to(
+ receive(:info).with(a_hash_including('message' => 'running inline',
+ 'class' => 'Gitlab::Foo::Bar::DummyWorker',
+ 'job_status' => 'running',
+ 'queue' => 'foo_bar_dummy'))
+ .once)
+
+ worker.bulk_perform_and_wait(args_list)
+
+ expect(worker.counter).to eq(1)
+ end
end
end
diff --git a/spec/workers/database/batched_background_migration/ci_database_worker_spec.rb b/spec/workers/database/batched_background_migration/ci_database_worker_spec.rb
index 2b4a42060d9..dfe7a266be2 100644
--- a/spec/workers/database/batched_background_migration/ci_database_worker_spec.rb
+++ b/spec/workers/database/batched_background_migration/ci_database_worker_spec.rb
@@ -3,5 +3,5 @@
require 'spec_helper'
RSpec.describe Database::BatchedBackgroundMigration::CiDatabaseWorker, :clean_gitlab_redis_shared_state do
- it_behaves_like 'it runs batched background migration jobs', :ci
+ it_behaves_like 'it runs batched background migration jobs', :ci, :ci_builds
end
diff --git a/spec/workers/database/batched_background_migration_worker_spec.rb b/spec/workers/database/batched_background_migration_worker_spec.rb
index a6c7db60abe..e57bd7581c2 100644
--- a/spec/workers/database/batched_background_migration_worker_spec.rb
+++ b/spec/workers/database/batched_background_migration_worker_spec.rb
@@ -3,5 +3,5 @@
require 'spec_helper'
RSpec.describe Database::BatchedBackgroundMigrationWorker do
- it_behaves_like 'it runs batched background migration jobs', :main
+ it_behaves_like 'it runs batched background migration jobs', :main, :events
end
diff --git a/spec/workers/every_sidekiq_worker_spec.rb b/spec/workers/every_sidekiq_worker_spec.rb
index e8ec7c28537..4a1bf7dbbf9 100644
--- a/spec/workers/every_sidekiq_worker_spec.rb
+++ b/spec/workers/every_sidekiq_worker_spec.rb
@@ -382,6 +382,7 @@ RSpec.describe 'Every Sidekiq worker' do
'ProjectScheduleBulkRepositoryShardMovesWorker' => 3,
'ProjectTemplateExportWorker' => false,
'ProjectUpdateRepositoryStorageWorker' => 3,
+ 'Projects::DisableLegacyOpenSourceLicenseForInactiveProjectsWorker' => 3,
'Projects::GitGarbageCollectWorker' => false,
'Projects::InactiveProjectsDeletionNotificationWorker' => 3,
'Projects::PostCreationWorker' => 3,
diff --git a/spec/workers/gitlab/github_import/stage/import_issue_events_worker_spec.rb b/spec/workers/gitlab/github_import/stage/import_issue_events_worker_spec.rb
index b3c6a48767c..932152c0764 100644
--- a/spec/workers/gitlab/github_import/stage/import_issue_events_worker_spec.rb
+++ b/spec/workers/gitlab/github_import/stage/import_issue_events_worker_spec.rb
@@ -8,37 +8,66 @@ RSpec.describe Gitlab::GithubImport::Stage::ImportIssueEventsWorker do
let(:project) { create(:project) }
let!(:group) { create(:group, projects: [project]) }
let(:feature_flag_state) { [group] }
+ let(:single_endpoint_feature_flag_state) { [group] }
describe '#import' do
let(:importer) { instance_double('Gitlab::GithubImport::Importer::SingleEndpointIssueEventsImporter') }
let(:client) { instance_double('Gitlab::GithubImport::Client') }
before do
+ stub_feature_flags(github_importer_single_endpoint_issue_events_import: single_endpoint_feature_flag_state)
stub_feature_flags(github_importer_issue_events_import: feature_flag_state)
end
- it 'imports all the issue events' do
- waiter = Gitlab::JobWaiter.new(2, '123')
+ context 'when single endpoint feature flag enabled' do
+ it 'imports all the issue events' do
+ waiter = Gitlab::JobWaiter.new(2, '123')
- expect(Gitlab::GithubImport::Importer::SingleEndpointIssueEventsImporter)
- .to receive(:new)
- .with(project, client)
- .and_return(importer)
+ expect(Gitlab::GithubImport::Importer::IssueEventsImporter).not_to receive(:new)
+ expect(Gitlab::GithubImport::Importer::SingleEndpointIssueEventsImporter)
+ .to receive(:new)
+ .with(project, client)
+ .and_return(importer)
- expect(importer).to receive(:execute).and_return(waiter)
+ expect(importer).to receive(:execute).and_return(waiter)
- expect(Gitlab::GithubImport::AdvanceStageWorker)
- .to receive(:perform_async)
- .with(project.id, { '123' => 2 }, :notes)
+ expect(Gitlab::GithubImport::AdvanceStageWorker)
+ .to receive(:perform_async)
+ .with(project.id, { '123' => 2 }, :notes)
- worker.import(client, project)
+ worker.import(client, project)
+ end
+ end
+
+ context 'when import issue events feature flag enabled' do
+ let(:single_endpoint_feature_flag_state) { false }
+
+ it 'imports the issue events partly' do
+ waiter = Gitlab::JobWaiter.new(2, '123')
+
+ expect(Gitlab::GithubImport::Importer::SingleEndpointIssueEventsImporter).not_to receive(:new)
+ expect(Gitlab::GithubImport::Importer::IssueEventsImporter)
+ .to receive(:new)
+ .with(project, client)
+ .and_return(importer)
+
+ expect(importer).to receive(:execute).and_return(waiter)
+
+ expect(Gitlab::GithubImport::AdvanceStageWorker)
+ .to receive(:perform_async)
+ .with(project.id, { '123' => 2 }, :notes)
+
+ worker.import(client, project)
+ end
end
- context 'when feature flag is disabled' do
+ context 'when feature flags are disabled' do
let(:feature_flag_state) { false }
+ let(:single_endpoint_feature_flag_state) { false }
it 'skips issue events import and calls next stage' do
expect(Gitlab::GithubImport::Importer::SingleEndpointIssueEventsImporter).not_to receive(:new)
+ expect(Gitlab::GithubImport::Importer::IssueEventsImporter).not_to receive(:new)
expect(Gitlab::GithubImport::AdvanceStageWorker).to receive(:perform_async).with(project.id, {}, :notes)
worker.import(client, project)
diff --git a/spec/workers/merge_requests/create_approval_event_worker_spec.rb b/spec/workers/merge_requests/create_approval_event_worker_spec.rb
new file mode 100644
index 00000000000..8389949ecc9
--- /dev/null
+++ b/spec/workers/merge_requests/create_approval_event_worker_spec.rb
@@ -0,0 +1,51 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe MergeRequests::CreateApprovalEventWorker do
+ let!(:user) { create(:user) }
+ let!(:project) { create(:project) }
+ let!(:merge_request) { create(:merge_request, source_project: project) }
+ let(:data) { { current_user_id: user.id, merge_request_id: merge_request.id } }
+ let(:approved_event) { MergeRequests::ApprovedEvent.new(data: data) }
+
+ it_behaves_like 'subscribes to event' do
+ let(:event) { approved_event }
+ end
+
+ it 'calls MergeRequests::CreateApprovalEventService' do
+ expect_next_instance_of(
+ MergeRequests::CreateApprovalEventService,
+ project: project, current_user: user
+ ) do |service|
+ expect(service).to receive(:execute).with(merge_request)
+ end
+
+ consume_event(subscriber: described_class, event: approved_event)
+ end
+
+ shared_examples 'when object does not exist' do
+ it 'does not call MergeRequests::CreateApprovalEventService' do
+ expect(MergeRequests::CreateApprovalEventService).not_to receive(:new)
+
+ expect { consume_event(subscriber: described_class, event: approved_event) }
+ .not_to raise_exception
+ end
+ end
+
+ context 'when the user does not exist' do
+ before do
+ user.destroy!
+ end
+
+ it_behaves_like 'when object does not exist'
+ end
+
+ context 'when the merge request does not exist' do
+ before do
+ merge_request.destroy!
+ end
+
+ it_behaves_like 'when object does not exist'
+ end
+end
diff --git a/spec/workers/merge_requests/create_approval_note_worker_spec.rb b/spec/workers/merge_requests/create_approval_note_worker_spec.rb
new file mode 100644
index 00000000000..f58d38599fc
--- /dev/null
+++ b/spec/workers/merge_requests/create_approval_note_worker_spec.rb
@@ -0,0 +1,52 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe MergeRequests::CreateApprovalNoteWorker do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project) }
+ let_it_be(:merge_request) { create(:merge_request, source_project: project) }
+
+ let(:data) { { current_user_id: user.id, merge_request_id: merge_request.id } }
+ let(:approved_event) { MergeRequests::ApprovedEvent.new(data: data) }
+
+ it_behaves_like 'subscribes to event' do
+ let(:event) { approved_event }
+ end
+
+ it 'calls SystemNoteService.approve_mr' do
+ expect(SystemNoteService).to receive(:approve_mr).with(merge_request, user)
+
+ consume_event(subscriber: described_class, event: approved_event)
+ end
+
+ shared_examples 'when object does not exist' do
+ it 'logs and does not call SystemNoteService.approve_mr' do
+ expect(Sidekiq.logger).to receive(:info).with(hash_including(log_payload))
+ expect(SystemNoteService).not_to receive(:approve_mr)
+
+ expect { consume_event(subscriber: described_class, event: approved_event) }
+ .not_to raise_exception
+ end
+ end
+
+ context 'when the user does not exist' do
+ before do
+ user.destroy!
+ end
+
+ it_behaves_like 'when object does not exist' do
+ let(:log_payload) { { 'message' => 'Current user not found.', 'current_user_id' => user.id } }
+ end
+ end
+
+ context 'when the merge request does not exist' do
+ before do
+ merge_request.destroy!
+ end
+
+ it_behaves_like 'when object does not exist' do
+ let(:log_payload) { { 'message' => 'Merge request not found.', 'merge_request_id' => merge_request.id } }
+ end
+ end
+end
diff --git a/spec/workers/merge_requests/execute_approval_hooks_worker_spec.rb b/spec/workers/merge_requests/execute_approval_hooks_worker_spec.rb
new file mode 100644
index 00000000000..0130ef63f50
--- /dev/null
+++ b/spec/workers/merge_requests/execute_approval_hooks_worker_spec.rb
@@ -0,0 +1,57 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe MergeRequests::ExecuteApprovalHooksWorker do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project) }
+ let_it_be(:merge_request) { create(:merge_request, source_project: project) }
+
+ let(:data) { { current_user_id: user.id, merge_request_id: merge_request.id } }
+ let(:approved_event) { MergeRequests::ApprovedEvent.new(data: data) }
+
+ it_behaves_like 'subscribes to event' do
+ let(:event) { approved_event }
+ end
+
+ it 'calls MergeRequests::ExecuteApprovalHooksService' do
+ expect_next_instance_of(
+ MergeRequests::ExecuteApprovalHooksService,
+ project: project, current_user: user
+ ) do |service|
+ expect(service).to receive(:execute).with(merge_request)
+ end
+
+ consume_event(subscriber: described_class, event: approved_event)
+ end
+
+ shared_examples 'when object does not exist' do
+ it 'logs and does not call MergeRequests::ExecuteApprovalHooksService' do
+ expect(Sidekiq.logger).to receive(:info).with(hash_including(log_payload))
+ expect(MergeRequests::ExecuteApprovalHooksService).not_to receive(:new)
+
+ expect { consume_event(subscriber: described_class, event: approved_event) }
+ .not_to raise_exception
+ end
+ end
+
+ context 'when the user does not exist' do
+ before do
+ user.destroy!
+ end
+
+ it_behaves_like 'when object does not exist' do
+ let(:log_payload) { { 'message' => 'Current user not found.', 'current_user_id' => user.id } }
+ end
+ end
+
+ context 'when the merge request does not exist' do
+ before do
+ merge_request.destroy!
+ end
+
+ it_behaves_like 'when object does not exist' do
+ let(:log_payload) { { 'message' => 'Merge request not found.', 'merge_request_id' => merge_request.id } }
+ end
+ end
+end
diff --git a/spec/workers/merge_requests/resolve_todos_after_approval_worker_spec.rb b/spec/workers/merge_requests/resolve_todos_after_approval_worker_spec.rb
new file mode 100644
index 00000000000..f8316a8ff05
--- /dev/null
+++ b/spec/workers/merge_requests/resolve_todos_after_approval_worker_spec.rb
@@ -0,0 +1,56 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe MergeRequests::ResolveTodosAfterApprovalWorker do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project) }
+ let_it_be(:merge_request) { create(:merge_request, source_project: project) }
+
+ let(:data) { { current_user_id: user.id, merge_request_id: merge_request.id } }
+ let(:approved_event) { MergeRequests::ApprovedEvent.new(data: data) }
+
+ it_behaves_like 'subscribes to event' do
+ let(:event) { approved_event }
+ end
+
+ it 'calls TodoService#resolve_todos_for_target' do
+ expect_next_instance_of(TodoService) do |todo_service|
+ expect(todo_service)
+ .to receive(:resolve_todos_for_target)
+ .with(merge_request, user)
+ end
+
+ consume_event(subscriber: described_class, event: approved_event)
+ end
+
+ shared_examples 'when object does not exist' do
+ it 'logs and does not call TodoService#resolve_todos_for_target' do
+ expect(Sidekiq.logger).to receive(:info).with(hash_including(log_payload))
+ expect(TodoService).not_to receive(:new)
+
+ expect { consume_event(subscriber: described_class, event: approved_event) }
+ .not_to raise_exception
+ end
+ end
+
+ context 'when the user does not exist' do
+ before do
+ user.destroy!
+ end
+
+ it_behaves_like 'when object does not exist' do
+ let(:log_payload) { { 'message' => 'Current user not found.', 'current_user_id' => user.id } }
+ end
+ end
+
+ context 'when the merge request does not exist' do
+ before do
+ merge_request.destroy!
+ end
+
+ it_behaves_like 'when object does not exist' do
+ let(:log_payload) { { 'message' => 'Merge request not found.', 'merge_request_id' => merge_request.id } }
+ end
+ end
+end
diff --git a/spec/workers/new_issue_worker_spec.rb b/spec/workers/new_issue_worker_spec.rb
index 35b83c3bee8..b9053b10419 100644
--- a/spec/workers/new_issue_worker_spec.rb
+++ b/spec/workers/new_issue_worker_spec.rb
@@ -74,6 +74,8 @@ RSpec.describe NewIssueWorker do
it 'creates a new event record' do
expect { worker.perform(issue.id, user.id) }.to change { Event.count }.from(0).to(1)
+
+ expect(Event.last).to have_attributes(target_id: issue.id, target_type: 'Issue')
end
it 'creates a notification for the mentioned user' do
@@ -89,6 +91,14 @@ RSpec.describe NewIssueWorker do
worker.perform(issue.id, user.id)
end
+
+ context 'when a class is set' do
+ it 'creates event with the correct type' do
+ expect { worker.perform(issue.id, user.id, 'WorkItem') }.to change { Event.count }.from(0).to(1)
+
+ expect(Event.last).to have_attributes(target_id: issue.id, target_type: 'WorkItem')
+ end
+ end
end
end
end
diff --git a/spec/workers/packages/cleanup/execute_policy_worker_spec.rb b/spec/workers/packages/cleanup/execute_policy_worker_spec.rb
index 81fcec1a360..6325a82ed3d 100644
--- a/spec/workers/packages/cleanup/execute_policy_worker_spec.rb
+++ b/spec/workers/packages/cleanup/execute_policy_worker_spec.rb
@@ -113,7 +113,7 @@ RSpec.describe Packages::Cleanup::ExecutePolicyWorker do
end
describe '#remaining_work_count' do
- subject { worker.remaining_work_count}
+ subject { worker.remaining_work_count }
context 'with no policies' do
it { is_expected.to eq(0) }
diff --git a/spec/workers/pages/invalidate_domain_cache_worker_spec.rb b/spec/workers/pages/invalidate_domain_cache_worker_spec.rb
index 1c1586ef199..9272e26a34f 100644
--- a/spec/workers/pages/invalidate_domain_cache_worker_spec.rb
+++ b/spec/workers/pages/invalidate_domain_cache_worker_spec.rb
@@ -13,8 +13,8 @@ RSpec.describe Pages::InvalidateDomainCacheWorker do
it_behaves_like 'subscribes to event'
it 'clears the cache with Gitlab::Pages::CacheControl' do
- caches.each do |cache_type, cache_id|
- expect_next_instance_of(Gitlab::Pages::CacheControl, type: cache_type, id: cache_id) do |cache_control|
+ caches.each do |cache|
+ expect_next_instance_of(Gitlab::Pages::CacheControl, type: cache[:type], id: cache[:id]) do |cache_control|
expect(cache_control).to receive(:clear_cache)
end
end
@@ -26,20 +26,120 @@ RSpec.describe Pages::InvalidateDomainCacheWorker do
it_behaves_like 'clears caches with',
event_class: Pages::PageDeployedEvent,
event_data: { project_id: 1, namespace_id: 2, root_namespace_id: 3 },
- caches: { namespace: 3, project: 1 }
+ caches: [
+ { type: :namespace, id: 3 },
+ { type: :project, id: 1 }
+ ]
it_behaves_like 'clears caches with',
event_class: Pages::PageDeletedEvent,
event_data: { project_id: 1, namespace_id: 2, root_namespace_id: 3 },
- caches: { namespace: 3, project: 1 }
+ caches: [
+ { type: :namespace, id: 3 },
+ { type: :project, id: 1 }
+ ]
it_behaves_like 'clears caches with',
event_class: Projects::ProjectDeletedEvent,
event_data: { project_id: 1, namespace_id: 2, root_namespace_id: 3 },
- caches: { namespace: 3, project: 1 }
+ caches: [
+ { type: :namespace, id: 3 },
+ { type: :project, id: 1 }
+ ]
it_behaves_like 'clears caches with',
event_class: Projects::ProjectCreatedEvent,
event_data: { project_id: 1, namespace_id: 2, root_namespace_id: 3 },
- caches: { namespace: 3, project: 1 }
+ caches: [
+ { type: :namespace, id: 3 },
+ { type: :project, id: 1 }
+ ]
+
+ it_behaves_like 'clears caches with',
+ event_class: Projects::ProjectArchivedEvent,
+ event_data: { project_id: 1, namespace_id: 2, root_namespace_id: 3 },
+ caches: [
+ { type: :namespace, id: 3 },
+ { type: :project, id: 1 }
+ ]
+
+ it_behaves_like 'clears caches with',
+ event_class: Projects::ProjectPathChangedEvent,
+ event_data: {
+ project_id: 1,
+ namespace_id: 2,
+ root_namespace_id: 3,
+ old_path: 'old_path',
+ new_path: 'new_path'
+ },
+ caches: [
+ { type: :namespace, id: 3 },
+ { type: :project, id: 1 }
+ ]
+
+ it_behaves_like 'clears caches with',
+ event_class: Projects::ProjectTransferedEvent,
+ event_data: {
+ project_id: 1,
+ old_namespace_id: 2,
+ old_root_namespace_id: 3,
+ new_namespace_id: 4,
+ new_root_namespace_id: 5
+ },
+ caches: [
+ { type: :project, id: 1 },
+ { type: :namespace, id: 3 },
+ { type: :namespace, id: 5 }
+ ]
+
+ it_behaves_like 'clears caches with',
+ event_class: Groups::GroupTransferedEvent,
+ event_data: {
+ group_id: 1,
+ old_root_namespace_id: 3,
+ new_root_namespace_id: 5
+ },
+ caches: [
+ { type: :namespace, id: 3 },
+ { type: :namespace, id: 5 }
+ ]
+
+ it_behaves_like 'clears caches with',
+ event_class: Groups::GroupPathChangedEvent,
+ event_data: {
+ group_id: 1,
+ root_namespace_id: 2,
+ old_path: 'old_path',
+ new_path: 'new_path'
+ },
+ caches: [
+ { type: :namespace, id: 2 }
+ ]
+
+ it_behaves_like 'clears caches with',
+ event_class: Groups::GroupDeletedEvent,
+ event_data: {
+ group_id: 1,
+ root_namespace_id: 3
+ },
+ caches: [
+ { type: :namespace, id: 3 }
+ ]
+
+ context 'when namespace based cache keys are duplicated' do
+ # de-dups namespace cache keys
+ it_behaves_like 'clears caches with',
+ event_class: Projects::ProjectTransferedEvent,
+ event_data: {
+ project_id: 1,
+ old_namespace_id: 2,
+ old_root_namespace_id: 5,
+ new_namespace_id: 4,
+ new_root_namespace_id: 5
+ },
+ caches: [
+ { type: :project, id: 1 },
+ { type: :namespace, id: 5 }
+ ]
+ end
end
diff --git a/spec/workers/post_receive_spec.rb b/spec/workers/post_receive_spec.rb
index 4ddb793516f..d632ca39e44 100644
--- a/spec/workers/post_receive_spec.rb
+++ b/spec/workers/post_receive_spec.rb
@@ -452,6 +452,12 @@ RSpec.describe PostReceive do
perform
end
+ it 'updates the snippet model updated_at' do
+ expect(snippet).to receive(:touch)
+
+ perform
+ end
+
it 'updates snippet statistics' do
expect(Snippets::UpdateStatisticsService).to receive(:new).with(snippet).and_call_original
diff --git a/spec/workers/project_cache_worker_spec.rb b/spec/workers/project_cache_worker_spec.rb
index 7f42c700ce4..30c85464452 100644
--- a/spec/workers/project_cache_worker_spec.rb
+++ b/spec/workers/project_cache_worker_spec.rb
@@ -115,7 +115,7 @@ RSpec.describe ProjectCacheWorker do
.twice
expect(UpdateProjectStatisticsWorker).to receive(:perform_in)
- .with(lease_timeout, project.id, statistics)
+ .with(lease_timeout, lease_key, project.id, statistics)
.and_call_original
expect(Namespaces::ScheduleAggregationWorker)
diff --git a/spec/workers/projects/import_export/relation_export_worker_spec.rb b/spec/workers/projects/import_export/relation_export_worker_spec.rb
new file mode 100644
index 00000000000..236650fe55b
--- /dev/null
+++ b/spec/workers/projects/import_export/relation_export_worker_spec.rb
@@ -0,0 +1,36 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Projects::ImportExport::RelationExportWorker, type: :worker do
+ let(:project_relation_export) { create(:project_relation_export) }
+ let(:job_args) { [project_relation_export.id] }
+
+ it_behaves_like 'an idempotent worker'
+
+ describe '#perform' do
+ subject(:worker) { described_class.new }
+
+ context 'when relation export has initial state queued' do
+ let(:project_relation_export) { create(:project_relation_export) }
+
+ it 'calls RelationExportService' do
+ expect_next_instance_of(Projects::ImportExport::RelationExportService) do |service|
+ expect(service).to receive(:execute)
+ end
+
+ worker.perform(project_relation_export.id)
+ end
+ end
+
+ context 'when relation export does not have queued state' do
+ let(:project_relation_export) { create(:project_relation_export, status_event: :start) }
+
+ it 'does not call RelationExportService' do
+ expect(Projects::ImportExport::RelationExportService).not_to receive(:new)
+
+ worker.perform(project_relation_export.id)
+ end
+ end
+ end
+end
diff --git a/spec/workers/remove_unreferenced_lfs_objects_worker_spec.rb b/spec/workers/remove_unreferenced_lfs_objects_worker_spec.rb
index 6007d3b34f8..2562a7bc6fe 100644
--- a/spec/workers/remove_unreferenced_lfs_objects_worker_spec.rb
+++ b/spec/workers/remove_unreferenced_lfs_objects_worker_spec.rb
@@ -6,12 +6,12 @@ RSpec.describe RemoveUnreferencedLfsObjectsWorker do
let(:worker) { described_class.new }
describe '#perform' do
- let!(:unreferenced_lfs_object1) { create(:lfs_object, oid: '1') }
- let!(:unreferenced_lfs_object2) { create(:lfs_object, oid: '2') }
+ let!(:unreferenced_lfs_object1) { create(:lfs_object, oid: '1' * 64) }
+ let!(:unreferenced_lfs_object2) { create(:lfs_object, oid: '2' * 64) }
let!(:project1) { create(:project, lfs_enabled: true) }
let!(:project2) { create(:project, lfs_enabled: true) }
- let!(:referenced_lfs_object1) { create(:lfs_object, oid: '3') }
- let!(:referenced_lfs_object2) { create(:lfs_object, oid: '4') }
+ let!(:referenced_lfs_object1) { create(:lfs_object, oid: '3' * 64) }
+ let!(:referenced_lfs_object2) { create(:lfs_object, oid: '4' * 64) }
let!(:lfs_objects_project1_1) do
create(:lfs_objects_project,
project: project1,
diff --git a/spec/workers/update_project_statistics_worker_spec.rb b/spec/workers/update_project_statistics_worker_spec.rb
index 1f840e363ea..2f356376d7c 100644
--- a/spec/workers/update_project_statistics_worker_spec.rb
+++ b/spec/workers/update_project_statistics_worker_spec.rb
@@ -3,17 +3,35 @@
require 'spec_helper'
RSpec.describe UpdateProjectStatisticsWorker do
+ include ExclusiveLeaseHelpers
+
let(:worker) { described_class.new }
let(:project) { create(:project, :repository) }
let(:statistics) { %w(repository_size) }
+ let(:lease_key) { "namespace:namespaces_root_statistics:#{project.namespace_id}" }
describe '#perform' do
- it 'updates the project statistics' do
- expect(Projects::UpdateStatisticsService).to receive(:new)
- .with(project, nil, statistics: statistics)
- .and_call_original
+ context 'when a lease could be obtained' do
+ it 'updates the project statistics' do
+ expect(Projects::UpdateStatisticsService).to receive(:new)
+ .with(project, nil, statistics: statistics)
+ .and_call_original
+
+ worker.perform(lease_key, project.id, statistics)
+ end
+ end
+
+ context 'when a lease could not be obtained' do
+ before do
+ stub_exclusive_lease_taken(lease_key, timeout: ProjectCacheWorker::LEASE_TIMEOUT)
+ end
+
+ it 'does not update the project statistics' do
+ lease_key = "namespace:namespaces_root_statistics:#{project.namespace_id}"
+ expect(Projects::UpdateStatisticsService).not_to receive(:new)
- worker.perform(project.id, statistics)
+ worker.perform(lease_key, project.id, statistics)
+ end
end
end
end
diff --git a/spec/workers/users/deactivate_dormant_users_worker_spec.rb b/spec/workers/users/deactivate_dormant_users_worker_spec.rb
index 297301c45e2..263ca31e0a0 100644
--- a/spec/workers/users/deactivate_dormant_users_worker_spec.rb
+++ b/spec/workers/users/deactivate_dormant_users_worker_spec.rb
@@ -25,20 +25,13 @@ RSpec.describe Users::DeactivateDormantUsersWorker do
context 'when automatic deactivation of dormant users is enabled' do
before do
stub_application_setting(deactivate_dormant_users: true)
- stub_const("#{described_class.name}::PAUSE_SECONDS", 0)
end
it 'deactivates dormant users' do
- freeze_time do
- stub_const("#{described_class.name}::BATCH_SIZE", 1)
-
- expect(worker).to receive(:sleep).twice
-
- worker.perform
+ worker.perform
- expect(User.dormant.count).to eq(0)
- expect(User.with_no_activity.count).to eq(0)
- end
+ expect(User.dormant.count).to eq(0)
+ expect(User.with_no_activity.count).to eq(0)
end
where(:user_type, :expected_state) do
@@ -78,6 +71,14 @@ RSpec.describe Users::DeactivateDormantUsersWorker do
expect(inactive_recently_created.reload.state).to eq('active')
end
+
+ it 'triggers update of highest user role for deactivated users', :clean_gitlab_redis_shared_state do
+ [dormant, inactive].each do |user|
+ expect(UpdateHighestRoleWorker).to receive(:perform_in).with(anything, user.id)
+ end
+
+ worker.perform
+ end
end
context 'when automatic deactivation of dormant users is disabled' do